Mirror of https://github.com/ankitects/anki.git, synced 2025-09-20 06:52:21 -04:00
treat 'not' as a normal search string
parent 7130f2c1fe
commit ce83001be6
2 changed files with 7 additions and 6 deletions
@@ -93,8 +93,8 @@ select distinct(n.id) from cards c, notes n where c.nid=n.id and """+preds
             elif c == "-":
                 if token:
                     token += c
-                elif not tokens or tokens[-1] != "not":
-                    tokens.append("not")
+                elif not tokens or tokens[-1] != "-":
+                    tokens.append("-")
             # normal character
             else:
                 token += c
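For orientation, here is a minimal standalone sketch of how the minus handling reads after this hunk. Only the minus branch mirrors the diff; the separator handling, the trailing flush, and the function name are assumptions added to make the snippet runnable, and the real _tokenize also handles quotes and parentheses that are omitted here.

# Sketch of the tokenizer's "-" handling after this change.
# Only the minus branch mirrors the hunk above; the space handling,
# the trailing flush and the function name are assumed for runnability.
def tokenize_sketch(query):
    tokens = []
    token = ""
    for c in query:
        # separator: flush the pending token (assumed behaviour)
        if c == " ":
            if token:
                tokens.append(token)
                token = ""
        # negation: emit a literal "-" token instead of the old "not"
        elif c == "-":
            if token:
                token += c            # "foo-bar" stays one token
            elif not tokens or tokens[-1] != "-":
                tokens.append("-")    # "--" collapses to a single "-"
        # normal character
        else:
            token += c
    if token:
        tokens.append(token)
    return tokens

print(tokenize_sketch("one -two"))    # ['one', '-', 'two']
print(tokenize_sketch("one --two"))   # ['one', '-', 'two']
print(tokenize_sketch("not done"))    # ['not', 'done'] -- the word "not" is plain text now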
@@ -140,7 +140,7 @@ select distinct(n.id) from cards c, notes n where c.nid=n.id and """+preds
             if s['bad']:
                 return None, None
             # special tokens
-            if token == "not":
+            if token == "-":
                 s['isnot'] = True
             elif token.lower() == "or":
                 s['isor'] = True
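The parser side can be sketched the same way: with this hunk, only the literal "-" token flips the negation flag, so a typed "not" falls through to the ordinary-term path. In the snippet below the comparisons mirror the hunk, but the loop wrapper, the classify() name, and the (term, negated) output shape are hypothetical.

# Sketch of the token dispatch after this change: negation is recognised
# only via the "-" token, so "not" is handled like any other search term.
# classify() and the (term, negated) output shape are hypothetical.
def classify(tokens):
    s = dict(isnot=False, isor=False)
    terms = []
    for token in tokens:
        if token == "-":
            s['isnot'] = True
        elif token.lower() == "or":
            s['isor'] = True
        else:
            terms.append((token, s['isnot']))
            s['isnot'] = False
    return terms

print(classify(["one", "-", "two"]))  # [('one', False), ('two', True)]
print(classify(["not", "done"]))      # [('not', False), ('done', False)]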
@@ -7,9 +7,10 @@ def test_parse():
     f = Finder(None)
     assert f._tokenize("hello world") == ["hello", "world"]
     assert f._tokenize("hello world") == ["hello", "world"]
-    assert f._tokenize("one -two") == ["one", "not", "two"]
-    assert f._tokenize("one --two") == ["one", "not", "two"]
-    assert f._tokenize("one or -two") == ["one", "or", "not", "two"]
+    assert f._tokenize("one -two") == ["one", "-", "two"]
+    assert f._tokenize("one --two") == ["one", "-", "two"]
+    assert f._tokenize("one - two") == ["one", "-", "two"]
+    assert f._tokenize("one or -two") == ["one", "or", "-", "two"]
     assert f._tokenize("'hello \"world\"'") == ["hello \"world\""]
     assert f._tokenize('"hello world"') == ["hello world"]
     assert f._tokenize("one (two or ( three or four))") == [
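The added "one - two" case pins down that a minus separated from its term by a space still tokenizes to a single "-", and the updated expectations confirm that negation now travels through the pipeline as "-" rather than "not". The practical effect, per the commit title, is that a query containing the word "not" is searched as normal text instead of negating the next term.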