Mirror of https://github.com/ankitects/anki.git, synced 2025-09-20 15:02:21 -04:00
treat 'not' as a normal search string
This commit is contained in:
parent 7130f2c1fe
commit ce83001be6
2 changed files with 7 additions and 6 deletions
@@ -93,8 +93,8 @@ select distinct(n.id) from cards c, notes n where c.nid=n.id and """+preds
         elif c == "-":
             if token:
                 token += c
-            elif not tokens or tokens[-1] != "not":
-                tokens.append("not")
+            elif not tokens or tokens[-1] != "-":
+                tokens.append("-")
         # normal character
         else:
             token += c
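The tokenizer change above, as a minimal standalone sketch. This is not Anki's Finder._tokenize (which also handles quoting and parentheses); the function name and the whitespace handling are assumptions for illustration. It shows the new behaviour: a leading dash is emitted as a literal "-" token rather than being rewritten to "not", and repeated dashes collapse into one token.

def tokenize_dashes(query):
    # reduced sketch of the dash handling from the diff above
    tokens = []
    token = ""
    for c in query:
        if c == " ":
            # whitespace ends the current token
            if token:
                tokens.append(token)
                token = ""
        elif c == "-":
            if token:
                # a dash inside a word stays in the word, e.g. "well-known"
                token += c
            elif not tokens or tokens[-1] != "-":
                # a leading dash becomes a literal "-" token (was "not")
                tokens.append("-")
        else:
            # normal character
            token += c
    if token:
        tokens.append(token)
    return tokens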
@@ -140,7 +140,7 @@ select distinct(n.id) from cards c, notes n where c.nid=n.id and """+preds
         if s['bad']:
             return None, None
         # special tokens
-        if token == "not":
+        if token == "-":
             s['isnot'] = True
         elif token.lower() == "or":
             s['isor'] = True
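On the parsing side, the special-token check now keys on the literal "-" the tokenizer emits, so the English word "not" falls through to ordinary term matching. A rough sketch of the dispatch, assuming a state dict s as in the diff context (handle_token is a hypothetical name, not Anki's API):

def handle_token(token, s):
    # only a literal "-" token, produced by a leading dash, negates the next term
    if token == "-":
        s['isnot'] = True
    # "or" keeps its special meaning, case-insensitively
    elif token.lower() == "or":
        s['isor'] = True
    else:
        # anything else, including the word "not", is a normal search string
        pass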
@@ -7,9 +7,10 @@ def test_parse():
     f = Finder(None)
     assert f._tokenize("hello world") == ["hello", "world"]
-    assert f._tokenize("one -two") == ["one", "not", "two"]
-    assert f._tokenize("one --two") == ["one", "not", "two"]
-    assert f._tokenize("one or -two") == ["one", "or", "not", "two"]
+    assert f._tokenize("one -two") == ["one", "-", "two"]
+    assert f._tokenize("one --two") == ["one", "-", "two"]
+    assert f._tokenize("one - two") == ["one", "-", "two"]
+    assert f._tokenize("one or -two") == ["one", "or", "-", "two"]
     assert f._tokenize("'hello \"world\"'") == ["hello \"world\""]
     assert f._tokenize('"hello world"') == ["hello world"]
     assert f._tokenize("one (two or ( three or four))") == [
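The net effect, demonstrated with the tokenize_dashes sketch from above (hypothetical helper, not Anki's API):

print(tokenize_dashes("one -two"))    # ['one', '-', 'two']   -- "two" is negated
print(tokenize_dashes("one --two"))   # ['one', '-', 'two']   -- repeated dashes collapse
print(tokenize_dashes("not a verb"))  # ['not', 'a', 'verb']  -- "not" is searched literally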