Mirror of https://github.com/ankitects/anki.git, commit 03b9f2a3f6
18 changed files with 423 additions and 323 deletions
@ -11,7 +11,7 @@ import sys
import time
import traceback
import weakref
from typing import Any, List, Optional, Sequence, Tuple, Union
from typing import Any, List, Literal, Optional, Sequence, Tuple, Union

import anki._backend.backend_pb2 as _pb
import anki.find

@ -43,7 +43,8 @@ from anki.utils import (
)

# public exports
SearchTerm = _pb.SearchTerm
SearchNode = _pb.SearchNode
SearchJoiner = Literal["AND", "OR"]
Progress = _pb.Progress
Config = _pb.Config
EmptyCardsReport = _pb.EmptyCardsReport

@ -471,7 +472,7 @@ class Collection:
)
return self._backend.search_cards(search=query, order=mode)

def find_notes(self, *terms: Union[str, SearchTerm]) -> Sequence[int]:
def find_notes(self, *terms: Union[str, SearchNode]) -> Sequence[int]:
return self._backend.search_notes(self.build_search_string(*terms))

def find_and_replace(

@ -487,7 +488,7 @@ class Collection:

# returns array of ("dupestr", [nids])
def findDupes(self, fieldName: str, search: str = "") -> List[Tuple[Any, list]]:
nids = self.findNotes(search, SearchTerm(field_name=fieldName))
nids = self.findNotes(search, SearchNode(field_name=fieldName))
# go through notes
vals: Dict[str, List[int]] = {}
dupes = []

@ -526,38 +527,85 @@ class Collection:
# Search Strings
##########################################################################

# pylint: disable=no-member
def build_search_string(
self,
*terms: Union[str, SearchTerm],
negate: bool = False,
match_any: bool = False,
*nodes: Union[str, SearchNode],
joiner: SearchJoiner = "AND",
) -> str:
"""Helper function for the backend's search string operations.
"""Join one or more searches, and return a normalized search string.

Pass terms as strings to normalize.
Pass fields of backend.proto/FilterToSearchIn as valid SearchTerms.
Pass multiple terms to concatenate (defaults to 'and', 'or' when 'match_any=True').
Pass 'negate=True' to negate the end result.
May raise InvalidInput.
To negate, wrap in a negated search term:

term = SearchNode(negated=col.group_searches(...))

Invalid searches will throw an exception.
"""
term = self.group_searches(*nodes, joiner=joiner)
return self._backend.build_search_string(term)

searches = []
for term in terms:
if isinstance(term, SearchTerm):
term = self._backend.filter_to_search(term)
searches.append(term)
if match_any:
sep = _pb.ConcatenateSearchesIn.OR
def group_searches(
self,
*nodes: Union[str, SearchNode],
joiner: SearchJoiner = "AND",
) -> SearchNode:
"""Join provided search nodes and strings into a single SearchNode.
If a single SearchNode is provided, it is returned as-is.
At least one node must be provided.
"""
assert nodes

# convert raw text to SearchNodes
search_nodes = [
node if isinstance(node, SearchNode) else SearchNode(parsable_text=node)
for node in nodes
]

# if there's more than one, wrap them in a group
if len(search_nodes) > 1:
return SearchNode(
group=SearchNode.Group(
nodes=search_nodes, joiner=self._pb_search_separator(joiner)
)
)
else:
sep = _pb.ConcatenateSearchesIn.AND
search_string = self._backend.concatenate_searches(sep=sep, searches=searches)
if negate:
search_string = self._backend.negate_search(search_string)
return search_nodes[0]

def join_searches(
self,
existing_node: SearchNode,
additional_node: SearchNode,
operator: Literal["AND", "OR"],
) -> str:
"""
|
||||
AND or OR `additional_term` to `existing_term`, without wrapping `existing_term` in brackets.
|
||||
Used by the Browse screen to avoid adding extra brackets when joining.
|
||||
If you're building a search query yourself, you probably don't need this.
|
||||
"""
search_string = self._backend.join_search_nodes(
joiner=self._pb_search_separator(operator),
existing_node=existing_node,
additional_node=additional_node,
)

return search_string

def replace_search_term(self, search: str, replacement: str) -> str:
return self._backend.replace_search_term(search=search, replacement=replacement)
def replace_in_search_node(
self, existing_node: SearchNode, replacement_node: SearchNode
) -> str:
"""If nodes of the same type as `replacement_node` are found in existing_node, replace them.

You can use this to replace any "deck" clauses in a search with a different deck for example.
"""
return self._backend.replace_search_node(
existing_node=existing_node, replacement_node=replacement_node
)

def _pb_search_separator(self, operator: SearchJoiner) -> SearchNode.Group.Joiner.V:
# pylint: disable=no-member
if operator == "AND":
return SearchNode.Group.Joiner.AND
else:
return SearchNode.Group.Joiner.OR

# Config
##########################################################################
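For orientation, a minimal sketch of how the reworked search API above might be called from add-on or aqt code; `col` is an assumed open Collection, and the resulting strings are only illustrative:

from anki.collection import SearchNode

# Combine raw text and structured nodes into one normalized search string.
query = col.build_search_string("dog", SearchNode(deck="French"))

# OR the nodes together instead of the default AND.
query_any = col.build_search_string(
    SearchNode(tag="leech"), SearchNode(flag=SearchNode.FLAG_RED), joiner="OR"
)

# Negation is expressed by wrapping a node, replacing the old negate= flag.
not_new = SearchNode(negated=col.group_searches(SearchNode(card_state=SearchNode.CARD_STATE_NEW)))
nids = col.find_notes(not_new)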
@ -90,7 +90,7 @@ class TagManager:
|
|||
|
||||
def rename(self, old: str, new: str) -> int:
|
||||
"Rename provided tag, returning number of changed notes."
|
||||
nids = self.col.find_notes(anki.collection.SearchTerm(tag=old))
|
||||
nids = self.col.find_notes(anki.collection.SearchNode(tag=old))
|
||||
if not nids:
|
||||
return 0
|
||||
escaped_name = re.sub(r"[*_\\]", r"\\\g<0>", old)
|
||||
|
|
|
@ -51,11 +51,9 @@ fn want_release_gil(method: u32) -> bool {
|
|||
| BackendMethod::LatestProgress
|
||||
| BackendMethod::SetWantsAbort
|
||||
| BackendMethod::I18nResources
|
||||
| BackendMethod::NormalizeSearch
|
||||
| BackendMethod::NegateSearch
|
||||
| BackendMethod::ConcatenateSearches
|
||||
| BackendMethod::ReplaceSearchTerm
|
||||
| BackendMethod::FilterToSearch
|
||||
| BackendMethod::JoinSearchNodes
|
||||
| BackendMethod::ReplaceSearchNode
|
||||
| BackendMethod::BuildSearchString
|
||||
)
|
||||
} else {
|
||||
false
|
||||
|
|
|
@ -6,7 +6,7 @@ ignore = forms,hooks_gen.py
|
|||
[TYPECHECK]
|
||||
ignored-modules=win32file,pywintypes,socket,win32pipe,winrt,pyaudio
|
||||
ignored-classes=
|
||||
SearchTerm,
|
||||
SearchNode,
|
||||
Config,
|
||||
|
||||
[REPORTS]
|
||||
|
|
|
@ -6,7 +6,7 @@ import aqt.deckchooser
|
|||
import aqt.editor
|
||||
import aqt.forms
|
||||
import aqt.modelchooser
|
||||
from anki.collection import SearchTerm
|
||||
from anki.collection import SearchNode
|
||||
from anki.consts import MODEL_CLOZE
|
||||
from anki.notes import Note
|
||||
from anki.utils import htmlToTextLine, isMac
|
||||
|
@ -144,7 +144,7 @@ class AddCards(QDialog):
|
|||
def onHistory(self) -> None:
|
||||
m = QMenu(self)
|
||||
for nid in self.history:
|
||||
if self.mw.col.findNotes(SearchTerm(nid=nid)):
|
||||
if self.mw.col.findNotes(SearchNode(nid=nid)):
|
||||
note = self.mw.col.getNote(nid)
|
||||
fields = note.fields
|
||||
txt = htmlToTextLine(", ".join(fields))
|
||||
|
@ -161,7 +161,7 @@ class AddCards(QDialog):
|
|||
m.exec_(self.historyButton.mapToGlobal(QPoint(0, 0)))
|
||||
|
||||
def editHistory(self, nid: int) -> None:
|
||||
aqt.dialogs.open("Browser", self.mw, search=(SearchTerm(nid=nid),))
|
||||
aqt.dialogs.open("Browser", self.mw, search=(SearchNode(nid=nid),))
|
||||
|
||||
def addNote(self, note: Note) -> Optional[Note]:
|
||||
note.model()["did"] = self.deckChooser.selectedId()
|
||||
|
|
|
@ -12,7 +12,7 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union,
|
|||
import aqt
|
||||
import aqt.forms
|
||||
from anki.cards import Card
|
||||
from anki.collection import Collection, Config, SearchTerm
|
||||
from anki.collection import Collection, Config, SearchNode
|
||||
from anki.consts import *
|
||||
from anki.errors import InvalidInput
|
||||
from anki.lang import without_unicode_isolation
|
||||
|
@ -442,7 +442,7 @@ class Browser(QMainWindow):
|
|||
self,
|
||||
mw: AnkiQt,
|
||||
card: Optional[Card] = None,
|
||||
search: Optional[Tuple[Union[str, SearchTerm]]] = None,
|
||||
search: Optional[Tuple[Union[str, SearchNode]]] = None,
|
||||
) -> None:
|
||||
"""
|
||||
card : try to search for its note and select it
|
||||
|
@ -615,7 +615,7 @@ class Browser(QMainWindow):
|
|||
self,
|
||||
_mw: AnkiQt,
|
||||
card: Optional[Card] = None,
|
||||
search: Optional[Tuple[Union[str, SearchTerm]]] = None,
|
||||
search: Optional[Tuple[Union[str, SearchNode]]] = None,
|
||||
) -> None:
|
||||
if search is not None:
|
||||
self.search_for_terms(*search)
|
||||
|
@ -630,7 +630,7 @@ class Browser(QMainWindow):
|
|||
def setupSearch(
|
||||
self,
|
||||
card: Optional[Card] = None,
|
||||
search: Optional[Tuple[Union[str, SearchTerm]]] = None,
|
||||
search: Optional[Tuple[Union[str, SearchNode]]] = None,
|
||||
) -> None:
|
||||
qconnect(self.form.searchEdit.lineEdit().returnPressed, self.onSearchActivated)
|
||||
self.form.searchEdit.setCompleter(None)
|
||||
|
@ -644,7 +644,7 @@ class Browser(QMainWindow):
|
|||
self.show_single_card(card)
|
||||
else:
|
||||
self.search_for(
|
||||
self.col.build_search_string(SearchTerm(deck="current")), ""
|
||||
self.col.build_search_string(SearchNode(deck="current")), ""
|
||||
)
|
||||
self.form.searchEdit.setFocus()
|
||||
|
||||
|
@ -707,7 +707,7 @@ class Browser(QMainWindow):
|
|||
)
|
||||
return selected
|
||||
|
||||
def search_for_terms(self, *search_terms: Union[str, SearchTerm]) -> None:
|
||||
def search_for_terms(self, *search_terms: Union[str, SearchNode]) -> None:
|
||||
search = self.col.build_search_string(*search_terms)
|
||||
self.form.searchEdit.setEditText(search)
|
||||
self.onSearchActivated()
|
||||
|
@ -717,7 +717,7 @@ class Browser(QMainWindow):
|
|||
|
||||
def on_show_single_card() -> None:
|
||||
self.card = card
|
||||
search = self.col.build_search_string(SearchTerm(nid=card.nid))
|
||||
search = self.col.build_search_string(SearchNode(nid=card.nid))
|
||||
search = gui_hooks.default_search(search, card)
|
||||
self.search_for(search, "")
|
||||
self.focusCid(card.id)
|
||||
|
@ -1407,7 +1407,7 @@ where id in %s"""
|
|||
tv.selectionModel().clear()
|
||||
|
||||
search = self.col.build_search_string(
|
||||
SearchTerm(nids=SearchTerm.IdList(ids=nids))
|
||||
SearchNode(nids=SearchNode.IdList(ids=nids))
|
||||
)
|
||||
self.search_for(search)
|
||||
|
||||
|
@ -1626,7 +1626,7 @@ where id in %s"""
|
|||
% (
|
||||
html.escape(
|
||||
self.col.build_search_string(
|
||||
SearchTerm(nids=SearchTerm.IdList(ids=nids))
|
||||
SearchNode(nids=SearchNode.IdList(ids=nids))
|
||||
)
|
||||
),
|
||||
tr(TR.BROWSING_NOTE_COUNT, count=len(nids)),
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import aqt
|
||||
from anki.collection import SearchTerm
|
||||
from anki.collection import SearchNode
|
||||
from anki.consts import *
|
||||
from aqt.qt import *
|
||||
from aqt.utils import TR, disable_help_button, showInfo, showWarning, tr
|
||||
|
@ -164,20 +164,20 @@ class CustomStudy(QDialog):
|
|||
# and then set various options
|
||||
if i == RADIO_FORGOT:
|
||||
search = self.mw.col.build_search_string(
|
||||
SearchTerm(
|
||||
rated=SearchTerm.Rated(days=spin, rating=SearchTerm.RATING_AGAIN)
|
||||
SearchNode(
|
||||
rated=SearchNode.Rated(days=spin, rating=SearchNode.RATING_AGAIN)
|
||||
)
|
||||
)
|
||||
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_RANDOM]
|
||||
dyn["resched"] = False
|
||||
elif i == RADIO_AHEAD:
|
||||
search = self.mw.col.build_search_string(SearchTerm(due_in_days=spin))
|
||||
search = self.mw.col.build_search_string(SearchNode(due_in_days=spin))
|
||||
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_DUE]
|
||||
dyn["resched"] = True
|
||||
elif i == RADIO_PREVIEW:
|
||||
search = self.mw.col.build_search_string(
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_NEW),
|
||||
SearchTerm(added_in_days=spin),
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_NEW),
|
||||
SearchNode(added_in_days=spin),
|
||||
)
|
||||
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_OLDEST]
|
||||
dyn["resched"] = False
|
||||
|
@ -185,19 +185,19 @@ class CustomStudy(QDialog):
|
|||
type = f.cardType.currentRow()
|
||||
if type == TYPE_NEW:
|
||||
terms = self.mw.col.build_search_string(
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_NEW)
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_NEW)
|
||||
)
|
||||
ord = DYN_ADDED
|
||||
dyn["resched"] = True
|
||||
elif type == TYPE_DUE:
|
||||
terms = self.mw.col.build_search_string(
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_DUE)
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_DUE)
|
||||
)
|
||||
ord = DYN_DUE
|
||||
dyn["resched"] = True
|
||||
elif type == TYPE_REVIEW:
|
||||
terms = self.mw.col.build_search_string(
|
||||
SearchTerm(negated=SearchTerm(card_state=SearchTerm.CARD_STATE_NEW))
|
||||
SearchNode(negated=SearchNode(card_state=SearchNode.CARD_STATE_NEW))
|
||||
)
|
||||
ord = DYN_RANDOM
|
||||
dyn["resched"] = True
|
||||
|
@ -208,7 +208,7 @@ class CustomStudy(QDialog):
|
|||
dyn["terms"][0] = [(terms + tags).strip(), spin, ord]
|
||||
# add deck limit
|
||||
dyn["terms"][0][0] = self.mw.col.build_search_string(
|
||||
dyn["terms"][0][0], SearchTerm(deck=self.deck["name"])
|
||||
dyn["terms"][0][0], SearchNode(deck=self.deck["name"])
|
||||
)
|
||||
self.mw.col.decks.save(dyn)
|
||||
# generate cards
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
from typing import Callable, List, Optional
|
||||
|
||||
import aqt
|
||||
from anki.collection import SearchTerm
|
||||
from anki.collection import SearchNode
|
||||
from anki.decks import Deck, DeckRenameError
|
||||
from anki.errors import InvalidInput
|
||||
from anki.lang import without_unicode_isolation
|
||||
|
@ -111,14 +111,14 @@ class DeckConf(QDialog):
|
|||
def set_default_searches(self, deck_name: str) -> None:
|
||||
self.form.search.setText(
|
||||
self.mw.col.build_search_string(
|
||||
SearchTerm(deck=deck_name),
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_DUE),
|
||||
SearchNode(deck=deck_name),
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_DUE),
|
||||
)
|
||||
)
|
||||
self.form.search_2.setText(
|
||||
self.mw.col.build_search_string(
|
||||
SearchTerm(deck=deck_name),
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_NEW),
|
||||
SearchNode(deck=deck_name),
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_NEW),
|
||||
)
|
||||
)
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ from bs4 import BeautifulSoup
|
|||
import aqt
|
||||
import aqt.sound
|
||||
from anki.cards import Card
|
||||
from anki.collection import SearchTerm
|
||||
from anki.collection import SearchNode
|
||||
from anki.consts import MODEL_CLOZE
|
||||
from anki.hooks import runFilter
|
||||
from anki.httpclient import HttpClient
|
||||
|
@ -546,8 +546,8 @@ class Editor:
|
|||
"Browser",
|
||||
self.mw,
|
||||
search=(
|
||||
SearchTerm(
|
||||
dupe=SearchTerm.Dupe(
|
||||
SearchNode(
|
||||
dupe=SearchNode.Dupe(
|
||||
notetype_id=self.note.model()["id"],
|
||||
first_field=self.note.fields[0],
|
||||
)
|
||||
|
|
|
@ -9,7 +9,7 @@ from concurrent.futures import Future
|
|||
from typing import Iterable, List, Optional, Sequence, TypeVar
|
||||
|
||||
import aqt
|
||||
from anki.collection import SearchTerm
|
||||
from anki.collection import SearchNode
|
||||
from anki.errors import Interrupted
|
||||
from anki.lang import TR
|
||||
from anki.media import CheckMediaOut
|
||||
|
@ -154,7 +154,7 @@ class MediaChecker:
|
|||
|
||||
if out is not None:
|
||||
nid, err = out
|
||||
aqt.dialogs.open("Browser", self.mw, search=(SearchTerm(nid=nid),))
|
||||
aqt.dialogs.open("Browser", self.mw, search=(SearchNode(nid=nid),))
|
||||
showText(err, type="html")
|
||||
else:
|
||||
tooltip(tr(TR.MEDIA_CHECK_ALL_LATEX_RENDERED))
|
||||
|
|
|
@ -8,7 +8,7 @@ from enum import Enum, auto
|
|||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, cast
|
||||
|
||||
import aqt
|
||||
from anki.collection import Config, SearchTerm
|
||||
from anki.collection import Config, SearchNode
|
||||
from anki.decks import DeckTreeNode
|
||||
from anki.errors import DeckRenameError, InvalidInput
|
||||
from anki.tags import TagTreeNode
|
||||
|
@ -391,20 +391,28 @@ class SidebarTreeView(QTreeView):
if item.is_expanded(searching):
self.setExpanded(idx, True)

def update_search(self, *terms: Union[str, SearchTerm]) -> None:
"""Modify the current search string based on modified keys, then refresh."""
def update_search(self, *terms: Union[str, SearchNode]) -> None:
"""Modify the current search string based on modifier keys, then refresh."""
mods = self.mw.app.keyboardModifiers()
previous = SearchNode(parsable_text=self.browser.current_search())
current = self.mw.col.group_searches(*terms)

# if Alt pressed, invert
if mods & Qt.AltModifier:
current = SearchNode(negated=current)

try:
search = self.col.build_search_string(*terms)
mods = self.mw.app.keyboardModifiers()
if mods & Qt.AltModifier:
search = self.col.build_search_string(search, negate=True)
current = self.browser.current_search()
if mods & Qt.ControlModifier and mods & Qt.ShiftModifier:
search = self.col.replace_search_term(current, search)
# If Ctrl+Shift, replace search nodes of the same type.
search = self.col.replace_in_search_node(previous, current)
elif mods & Qt.ControlModifier:
search = self.col.build_search_string(current, search)
# If Ctrl, AND with previous
search = self.col.join_searches(previous, current, "AND")
elif mods & Qt.ShiftModifier:
search = self.col.build_search_string(current, search, match_any=True)
# If Shift, OR with previous
search = self.col.join_searches(previous, current, "OR")
else:
search = self.col.build_search_string(current)
except InvalidInput as e:
show_invalid_search_error(e)
else:
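A rough sketch of what the modifier branches above produce, assuming the browser currently shows deck:French and the clicked sidebar item maps to tag:leech (names and output strings are illustrative; the real strings come back quoted from the backend):

previous = SearchNode(parsable_text="deck:French")
current = col.group_searches(SearchNode(tag="leech"))

col.build_search_string(current)                       # plain click: tag:leech
col.build_search_string(SearchNode(negated=current))   # Alt: -tag:leech
col.join_searches(previous, current, "AND")            # Ctrl: deck:French AND tag:leech
col.join_searches(previous, current, "OR")             # Shift: deck:French OR tag:leech
col.replace_in_search_node(previous, current)          # Ctrl+Shift: same-type nodes swapped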
|
@ -589,8 +597,8 @@ class SidebarTreeView(QTreeView):
|
|||
|
||||
return top
|
||||
|
||||
def _filter_func(self, *terms: Union[str, SearchTerm]) -> Callable:
|
||||
return lambda: self.update_search(self.col.build_search_string(*terms))
|
||||
def _filter_func(self, *terms: Union[str, SearchNode]) -> Callable:
|
||||
return lambda: self.update_search(*terms)
|
||||
|
||||
# Tree: Saved Searches
|
||||
###########################
|
||||
|
@ -640,33 +648,33 @@ class SidebarTreeView(QTreeView):
|
|||
name=TR.BROWSING_SIDEBAR_DUE_TODAY,
|
||||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(SearchTerm(due_on_day=0)),
|
||||
on_click=search(SearchNode(due_on_day=0)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_ADDED_TODAY,
|
||||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(SearchTerm(added_in_days=1)),
|
||||
on_click=search(SearchNode(added_in_days=1)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_EDITED_TODAY,
|
||||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(SearchTerm(edited_in_days=1)),
|
||||
on_click=search(SearchNode(edited_in_days=1)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_STUDIED_TODAY,
|
||||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(SearchTerm(rated=SearchTerm.Rated(days=1))),
|
||||
on_click=search(SearchNode(rated=SearchNode.Rated(days=1))),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_AGAIN_TODAY,
|
||||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(
|
||||
SearchTerm(
|
||||
rated=SearchTerm.Rated(days=1, rating=SearchTerm.RATING_AGAIN)
|
||||
SearchNode(
|
||||
rated=SearchNode.Rated(days=1, rating=SearchNode.RATING_AGAIN)
|
||||
)
|
||||
),
|
||||
)
|
||||
|
@ -675,8 +683,8 @@ class SidebarTreeView(QTreeView):
|
|||
icon=icon,
|
||||
type=type,
|
||||
on_click=search(
|
||||
SearchTerm(card_state=SearchTerm.CARD_STATE_DUE),
|
||||
SearchTerm(negated=SearchTerm(due_on_day=0)),
|
||||
SearchNode(card_state=SearchNode.CARD_STATE_DUE),
|
||||
SearchNode(negated=SearchNode(due_on_day=0)),
|
||||
),
|
||||
)
|
||||
|
||||
|
@ -699,32 +707,32 @@ class SidebarTreeView(QTreeView):
|
|||
TR.ACTIONS_NEW,
|
||||
icon=icon.with_color(colors.NEW_COUNT),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(card_state=SearchTerm.CARD_STATE_NEW)),
|
||||
on_click=search(SearchNode(card_state=SearchNode.CARD_STATE_NEW)),
|
||||
)
|
||||
|
||||
root.add_simple(
|
||||
name=TR.SCHEDULING_LEARNING,
|
||||
icon=icon.with_color(colors.LEARN_COUNT),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(card_state=SearchTerm.CARD_STATE_LEARN)),
|
||||
on_click=search(SearchNode(card_state=SearchNode.CARD_STATE_LEARN)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.SCHEDULING_REVIEW,
|
||||
icon=icon.with_color(colors.REVIEW_COUNT),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(card_state=SearchTerm.CARD_STATE_REVIEW)),
|
||||
on_click=search(SearchNode(card_state=SearchNode.CARD_STATE_REVIEW)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_SUSPENDED,
|
||||
icon=icon.with_color(colors.SUSPENDED_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(card_state=SearchTerm.CARD_STATE_SUSPENDED)),
|
||||
on_click=search(SearchNode(card_state=SearchNode.CARD_STATE_SUSPENDED)),
|
||||
)
|
||||
root.add_simple(
|
||||
name=TR.BROWSING_BURIED,
|
||||
icon=icon.with_color(colors.BURIED_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(card_state=SearchTerm.CARD_STATE_BURIED)),
|
||||
on_click=search(SearchNode(card_state=SearchNode.CARD_STATE_BURIED)),
|
||||
)
|
||||
|
||||
# Tree: Flags
|
||||
|
@ -740,38 +748,38 @@ class SidebarTreeView(QTreeView):
|
|||
collapse_key=Config.Bool.COLLAPSE_FLAGS,
|
||||
type=SidebarItemType.FLAG_ROOT,
|
||||
)
|
||||
root.on_click = search(SearchTerm(flag=SearchTerm.FLAG_ANY))
|
||||
root.on_click = search(SearchNode(flag=SearchNode.FLAG_ANY))
|
||||
|
||||
type = SidebarItemType.FLAG
|
||||
root.add_simple(
|
||||
TR.ACTIONS_RED_FLAG,
|
||||
icon=icon.with_color(colors.FLAG1_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(flag=SearchTerm.FLAG_RED)),
|
||||
on_click=search(SearchNode(flag=SearchNode.FLAG_RED)),
|
||||
)
|
||||
root.add_simple(
|
||||
TR.ACTIONS_ORANGE_FLAG,
|
||||
icon=icon.with_color(colors.FLAG2_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(flag=SearchTerm.FLAG_ORANGE)),
|
||||
on_click=search(SearchNode(flag=SearchNode.FLAG_ORANGE)),
|
||||
)
|
||||
root.add_simple(
|
||||
TR.ACTIONS_GREEN_FLAG,
|
||||
icon=icon.with_color(colors.FLAG3_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(flag=SearchTerm.FLAG_GREEN)),
|
||||
on_click=search(SearchNode(flag=SearchNode.FLAG_GREEN)),
|
||||
)
|
||||
root.add_simple(
|
||||
TR.ACTIONS_BLUE_FLAG,
|
||||
icon=icon.with_color(colors.FLAG4_FG),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(flag=SearchTerm.FLAG_BLUE)),
|
||||
on_click=search(SearchNode(flag=SearchNode.FLAG_BLUE)),
|
||||
)
|
||||
root.add_simple(
|
||||
TR.BROWSING_NO_FLAG,
|
||||
icon=icon.with_color(colors.DISABLED),
|
||||
type=type,
|
||||
on_click=search(SearchTerm(flag=SearchTerm.FLAG_NONE)),
|
||||
on_click=search(SearchNode(flag=SearchNode.FLAG_NONE)),
|
||||
)
|
||||
|
||||
# Tree: Tags
|
||||
|
@ -794,7 +802,7 @@ class SidebarTreeView(QTreeView):
|
|||
item = SidebarItem(
|
||||
node.name,
|
||||
icon,
|
||||
self._filter_func(SearchTerm(tag=head + node.name)),
|
||||
self._filter_func(SearchNode(tag=head + node.name)),
|
||||
toggle_expand(),
|
||||
node.expanded,
|
||||
item_type=SidebarItemType.TAG,
|
||||
|
@ -812,12 +820,12 @@ class SidebarTreeView(QTreeView):
|
|||
collapse_key=Config.Bool.COLLAPSE_TAGS,
|
||||
type=SidebarItemType.TAG_ROOT,
|
||||
)
|
||||
root.on_click = self._filter_func(SearchTerm(negated=SearchTerm(tag="none")))
|
||||
root.on_click = self._filter_func(SearchNode(negated=SearchNode(tag="none")))
|
||||
root.add_simple(
|
||||
name=tr(TR.BROWSING_SIDEBAR_UNTAGGED),
|
||||
icon=icon,
|
||||
type=SidebarItemType.TAG_NONE,
|
||||
on_click=self._filter_func(SearchTerm(tag="none")),
|
||||
on_click=self._filter_func(SearchNode(tag="none")),
|
||||
)
|
||||
|
||||
render(root, tree.children)
|
||||
|
@ -840,7 +848,7 @@ class SidebarTreeView(QTreeView):
|
|||
item = SidebarItem(
|
||||
node.name,
|
||||
icon,
|
||||
self._filter_func(SearchTerm(deck=head + node.name)),
|
||||
self._filter_func(SearchNode(deck=head + node.name)),
|
||||
toggle_expand(),
|
||||
not node.collapsed,
|
||||
item_type=SidebarItemType.DECK,
|
||||
|
@ -859,12 +867,12 @@ class SidebarTreeView(QTreeView):
|
|||
collapse_key=Config.Bool.COLLAPSE_DECKS,
|
||||
type=SidebarItemType.DECK_ROOT,
|
||||
)
|
||||
root.on_click = self._filter_func(SearchTerm(deck="*"))
|
||||
root.on_click = self._filter_func(SearchNode(deck="*"))
|
||||
current = root.add_simple(
|
||||
name=tr(TR.BROWSING_CURRENT_DECK),
|
||||
icon=icon,
|
||||
type=SidebarItemType.DECK,
|
||||
on_click=self._filter_func(SearchTerm(deck="current")),
|
||||
on_click=self._filter_func(SearchNode(deck="current")),
|
||||
)
|
||||
current.id = self.mw.col.decks.selected()
|
||||
|
||||
|
@ -887,7 +895,7 @@ class SidebarTreeView(QTreeView):
|
|||
item = SidebarItem(
|
||||
nt["name"],
|
||||
icon,
|
||||
self._filter_func(SearchTerm(note=nt["name"])),
|
||||
self._filter_func(SearchNode(note=nt["name"])),
|
||||
item_type=SidebarItemType.NOTETYPE,
|
||||
id=nt["id"],
|
||||
)
|
||||
|
@ -897,7 +905,7 @@ class SidebarTreeView(QTreeView):
|
|||
tmpl["name"],
|
||||
icon,
|
||||
self._filter_func(
|
||||
SearchTerm(note=nt["name"]), SearchTerm(template=c)
|
||||
SearchNode(note=nt["name"]), SearchNode(template=c)
|
||||
),
|
||||
item_type=SidebarItemType.NOTETYPE_TEMPLATE,
|
||||
full_name=f"{nt['name']}::{tmpl['name']}",
|
||||
|
|
|
@ -90,13 +90,11 @@ service BackendService {

// searching

rpc FilterToSearch(SearchTerm) returns (String);
rpc NormalizeSearch(String) returns (String);
rpc BuildSearchString(SearchNode) returns (String);
rpc SearchCards(SearchCardsIn) returns (SearchCardsOut);
rpc SearchNotes(SearchNotesIn) returns (SearchNotesOut);
rpc NegateSearch(String) returns (String);
rpc ConcatenateSearches(ConcatenateSearchesIn) returns (String);
rpc ReplaceSearchTerm(ReplaceSearchTermIn) returns (String);
rpc JoinSearchNodes(JoinSearchNodesIn) returns (String);
rpc ReplaceSearchNode(ReplaceSearchNodeIn) returns (String);
rpc FindAndReplace(FindAndReplaceIn) returns (UInt32);

// scheduling

@ -773,7 +771,7 @@ message SearchNotesOut {
repeated int64 note_ids = 2;
}

message SearchTerm {
message SearchNode {
message Dupe {
int64 notetype_id = 1;
string first_field = 2;

@ -809,10 +807,18 @@ message SearchTerm {
message IdList {
repeated int64 ids = 1;
}
message Group {
enum Joiner {
AND = 0;
OR = 1;
}
repeated SearchNode nodes = 1;
Joiner joiner = 2;
}
oneof filter {
string tag = 1;
string deck = 2;
string note = 3;
Group group = 1;
SearchNode negated = 2;
string parsable_text = 3;
uint32 template = 4;
int64 nid = 5;
Dupe dupe = 6;

@ -824,23 +830,22 @@ message SearchTerm {
CardState card_state = 12;
IdList nids = 13;
uint32 edited_in_days = 14;
SearchTerm negated = 15;
string deck = 15;
int32 due_on_day = 16;
string tag = 17;
string note = 18;
}
}

message ConcatenateSearchesIn {
enum Separator {
AND = 0;
OR = 1;
}
Separator sep = 1;
repeated string searches = 2;
message JoinSearchNodesIn {
SearchNode.Group.Joiner joiner = 1;
SearchNode existing_node = 2;
SearchNode additional_node = 3;
}

message ReplaceSearchTermIn {
string search = 1;
string replacement = 2;
message ReplaceSearchNodeIn {
SearchNode existing_node = 1;
SearchNode replacement_node = 2;
}

message CloseCollectionIn {
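To illustrate the message layout above from the Python side, a small hedged sketch that builds roughly "(tag:leech OR flag:1) AND -nid:123" as a node tree; BuildSearchString converts such a tree back into canonical search text:

import anki._backend.backend_pb2 as _pb

SearchNode = _pb.SearchNode

# Inner OR group of two simple filters.
inner = SearchNode(
    group=SearchNode.Group(
        joiner=SearchNode.Group.Joiner.OR,
        nodes=[SearchNode(tag="leech"), SearchNode(flag=SearchNode.FLAG_RED)],
    )
)
# Outer AND group that adds a negated note-id filter.
node = SearchNode(
    group=SearchNode.Group(
        joiner=SearchNode.Group.Joiner.AND,
        nodes=[inner, SearchNode(negated=SearchNode(nid=123))],
    )
)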
|
|
@ -6,7 +6,6 @@ use crate::{
|
|||
backend::dbproxy::db_command_bytes,
|
||||
backend_proto as pb,
|
||||
backend_proto::{
|
||||
concatenate_searches_in::Separator as BoolSeparatorProto,
|
||||
sort_order::builtin::Kind as SortKindProto, sort_order::Value as SortOrderProto,
|
||||
AddOrUpdateDeckConfigLegacyIn, BackendResult, Empty, RenderedTemplateReplacement,
|
||||
},
|
||||
|
@ -38,9 +37,8 @@ use crate::{
|
|||
timespan::{answer_button_time, time_span},
|
||||
},
|
||||
search::{
|
||||
concatenate_searches, negate_search, normalize_search, replace_search_term, write_nodes,
|
||||
BoolSeparator, Node, PropertyKind, RatingKind, SearchNode, SortMode, StateKind,
|
||||
TemplateKind,
|
||||
concatenate_searches, parse_search, replace_search_node, write_nodes, BoolSeparator, Node,
|
||||
PropertyKind, RatingKind, SearchNode, SortMode, StateKind, TemplateKind,
|
||||
},
|
||||
stats::studied_today,
|
||||
sync::{
|
||||
|
@ -55,14 +53,15 @@ use crate::{
|
|||
};
|
||||
use fluent::FluentValue;
|
||||
use futures::future::{AbortHandle, AbortRegistration, Abortable};
|
||||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use once_cell::sync::OnceCell;
|
||||
use pb::{sync_status_out, BackendService};
|
||||
use prost::Message;
|
||||
use serde_json::Value as JsonValue;
|
||||
use slog::warn;
|
||||
use std::collections::HashSet;
|
||||
use std::convert::TryFrom;
|
||||
use std::{collections::HashSet, convert::TryInto};
|
||||
use std::{
|
||||
result,
|
||||
sync::{Arc, Mutex},
|
||||
|
@ -267,7 +266,7 @@ impl From<pb::NoteId> for NoteID {
|
|||
}
|
||||
}
|
||||
|
||||
impl pb::search_term::IdList {
|
||||
impl pb::search_node::IdList {
|
||||
fn into_id_string(self) -> String {
|
||||
self.ids
|
||||
.iter()
|
||||
|
@ -295,40 +294,34 @@ impl From<pb::DeckConfigId> for DeckConfID {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<pb::SearchTerm> for Node<'_> {
|
||||
fn from(msg: pb::SearchTerm) -> Self {
|
||||
use pb::search_term::Filter;
|
||||
use pb::search_term::Flag;
|
||||
if let Some(filter) = msg.filter {
|
||||
impl TryFrom<pb::SearchNode> for Node {
|
||||
type Error = AnkiError;
|
||||
|
||||
fn try_from(msg: pb::SearchNode) -> std::result::Result<Self, Self::Error> {
|
||||
use pb::search_node::group::Joiner;
|
||||
use pb::search_node::Filter;
|
||||
use pb::search_node::Flag;
|
||||
Ok(if let Some(filter) = msg.filter {
|
||||
match filter {
|
||||
Filter::Tag(s) => Node::Search(SearchNode::Tag(
|
||||
escape_anki_wildcards(&s).into_owned().into(),
|
||||
)),
|
||||
Filter::Deck(s) => Node::Search(SearchNode::Deck(
|
||||
if s == "*" {
|
||||
s
|
||||
} else {
|
||||
escape_anki_wildcards(&s).into_owned()
|
||||
}
|
||||
.into(),
|
||||
)),
|
||||
Filter::Note(s) => Node::Search(SearchNode::NoteType(
|
||||
escape_anki_wildcards(&s).into_owned().into(),
|
||||
)),
|
||||
Filter::Tag(s) => Node::Search(SearchNode::Tag(escape_anki_wildcards(&s))),
|
||||
Filter::Deck(s) => Node::Search(SearchNode::Deck(if s == "*" {
|
||||
s
|
||||
} else {
|
||||
escape_anki_wildcards(&s)
|
||||
})),
|
||||
Filter::Note(s) => Node::Search(SearchNode::NoteType(escape_anki_wildcards(&s))),
|
||||
Filter::Template(u) => {
|
||||
Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
|
||||
}
|
||||
Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string().into())),
|
||||
Filter::Nids(nids) => {
|
||||
Node::Search(SearchNode::NoteIDs(nids.into_id_string().into()))
|
||||
}
|
||||
Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string())),
|
||||
Filter::Nids(nids) => Node::Search(SearchNode::NoteIDs(nids.into_id_string())),
|
||||
Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
|
||||
note_type_id: dupe.notetype_id.into(),
|
||||
text: dupe.first_field.into(),
|
||||
text: dupe.first_field,
|
||||
}),
|
||||
Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
|
||||
field: escape_anki_wildcards(&s).into_owned().into(),
|
||||
text: "*".to_string().into(),
|
||||
field: escape_anki_wildcards(&s),
|
||||
text: "*".to_string(),
|
||||
is_re: false,
|
||||
}),
|
||||
Filter::Rated(rated) => Node::Search(SearchNode::Rated {
|
||||
|
@ -346,7 +339,7 @@ impl From<pb::SearchTerm> for Node<'_> {
|
|||
}),
|
||||
Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
|
||||
Filter::CardState(state) => Node::Search(SearchNode::State(
|
||||
pb::search_term::CardState::from_i32(state)
|
||||
pb::search_node::CardState::from_i32(state)
|
||||
.unwrap_or_default()
|
||||
.into(),
|
||||
)),
|
||||
|
@ -358,45 +351,74 @@ impl From<pb::SearchTerm> for Node<'_> {
|
|||
Flag::Green => Node::Search(SearchNode::Flag(3)),
|
||||
Flag::Blue => Node::Search(SearchNode::Flag(4)),
|
||||
},
|
||||
Filter::Negated(term) => Node::Not(Box::new((*term).into())),
|
||||
Filter::Negated(term) => Node::try_from(*term)?.negated(),
|
||||
Filter::Group(mut group) => {
|
||||
match group.nodes.len() {
|
||||
0 => return Err(AnkiError::invalid_input("empty group")),
|
||||
// a group of 1 doesn't need to be a group
|
||||
1 => group.nodes.pop().unwrap().try_into()?,
|
||||
// 2+ nodes
|
||||
_ => {
|
||||
let joiner = match group.joiner() {
|
||||
Joiner::And => Node::And,
|
||||
Joiner::Or => Node::Or,
|
||||
};
|
||||
let parsed: Vec<_> = group
|
||||
.nodes
|
||||
.into_iter()
|
||||
.map(TryFrom::try_from)
|
||||
.collect::<Result<_>>()?;
|
||||
let joined = parsed.into_iter().intersperse(joiner).collect();
|
||||
Node::Group(joined)
|
||||
}
|
||||
}
|
||||
}
|
||||
Filter::ParsableText(text) => {
|
||||
let mut nodes = parse_search(&text)?;
|
||||
if nodes.len() == 1 {
|
||||
nodes.pop().unwrap()
|
||||
} else {
|
||||
Node::Group(nodes)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Node::Search(SearchNode::WholeCollection)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BoolSeparatorProto> for BoolSeparator {
|
||||
fn from(sep: BoolSeparatorProto) -> Self {
|
||||
impl From<pb::search_node::group::Joiner> for BoolSeparator {
|
||||
fn from(sep: pb::search_node::group::Joiner) -> Self {
|
||||
match sep {
|
||||
BoolSeparatorProto::And => BoolSeparator::And,
|
||||
BoolSeparatorProto::Or => BoolSeparator::Or,
|
||||
pb::search_node::group::Joiner::And => BoolSeparator::And,
|
||||
pb::search_node::group::Joiner::Or => BoolSeparator::Or,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<pb::search_term::Rating> for RatingKind {
|
||||
fn from(r: pb::search_term::Rating) -> Self {
|
||||
impl From<pb::search_node::Rating> for RatingKind {
|
||||
fn from(r: pb::search_node::Rating) -> Self {
|
||||
match r {
|
||||
pb::search_term::Rating::Again => RatingKind::AnswerButton(1),
|
||||
pb::search_term::Rating::Hard => RatingKind::AnswerButton(2),
|
||||
pb::search_term::Rating::Good => RatingKind::AnswerButton(3),
|
||||
pb::search_term::Rating::Easy => RatingKind::AnswerButton(4),
|
||||
pb::search_term::Rating::Any => RatingKind::AnyAnswerButton,
|
||||
pb::search_term::Rating::ByReschedule => RatingKind::ManualReschedule,
|
||||
pb::search_node::Rating::Again => RatingKind::AnswerButton(1),
|
||||
pb::search_node::Rating::Hard => RatingKind::AnswerButton(2),
|
||||
pb::search_node::Rating::Good => RatingKind::AnswerButton(3),
|
||||
pb::search_node::Rating::Easy => RatingKind::AnswerButton(4),
|
||||
pb::search_node::Rating::Any => RatingKind::AnyAnswerButton,
|
||||
pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<pb::search_term::CardState> for StateKind {
|
||||
fn from(k: pb::search_term::CardState) -> Self {
|
||||
impl From<pb::search_node::CardState> for StateKind {
|
||||
fn from(k: pb::search_node::CardState) -> Self {
|
||||
match k {
|
||||
pb::search_term::CardState::New => StateKind::New,
|
||||
pb::search_term::CardState::Learn => StateKind::Learning,
|
||||
pb::search_term::CardState::Review => StateKind::Review,
|
||||
pb::search_term::CardState::Due => StateKind::Due,
|
||||
pb::search_term::CardState::Suspended => StateKind::Suspended,
|
||||
pb::search_term::CardState::Buried => StateKind::Buried,
|
||||
pb::search_node::CardState::New => StateKind::New,
|
||||
pb::search_node::CardState::Learn => StateKind::Learning,
|
||||
pb::search_node::CardState::Review => StateKind::Review,
|
||||
pb::search_node::CardState::Due => StateKind::Due,
|
||||
pb::search_node::CardState::Suspended => StateKind::Suspended,
|
||||
pb::search_node::CardState::Buried => StateKind::Buried,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -525,12 +547,8 @@ impl BackendService for Backend {
|
|||
// searching
|
||||
//-----------------------------------------------
|
||||
|
||||
fn filter_to_search(&self, input: pb::SearchTerm) -> Result<pb::String> {
|
||||
Ok(write_nodes(&[input.into()]).into())
|
||||
}
|
||||
|
||||
fn normalize_search(&self, input: pb::String) -> Result<pb::String> {
|
||||
Ok(normalize_search(&input.val)?.into())
|
||||
fn build_search_string(&self, input: pb::SearchNode) -> Result<pb::String> {
|
||||
Ok(write_nodes(&[input.try_into()?]).into())
|
||||
}
|
||||
|
||||
fn search_cards(&self, input: pb::SearchCardsIn) -> Result<pb::SearchCardsOut> {
|
||||
|
@ -552,16 +570,31 @@ impl BackendService for Backend {
|
|||
})
|
||||
}
|
||||
|
||||
fn negate_search(&self, input: pb::String) -> Result<pb::String> {
|
||||
Ok(negate_search(&input.val)?.into())
|
||||
fn join_search_nodes(&self, input: pb::JoinSearchNodesIn) -> Result<pb::String> {
|
||||
let sep = input.joiner().into();
|
||||
let existing_nodes = {
|
||||
let node = input.existing_node.unwrap_or_default().try_into()?;
|
||||
if let Node::Group(nodes) = node {
|
||||
nodes
|
||||
} else {
|
||||
vec![node]
|
||||
}
|
||||
};
|
||||
let additional_node = input.additional_node.unwrap_or_default().try_into()?;
|
||||
Ok(concatenate_searches(sep, existing_nodes, additional_node).into())
|
||||
}
|
||||
|
||||
fn concatenate_searches(&self, input: pb::ConcatenateSearchesIn) -> Result<pb::String> {
|
||||
Ok(concatenate_searches(input.sep().into(), &input.searches)?.into())
|
||||
}
|
||||
|
||||
fn replace_search_term(&self, input: pb::ReplaceSearchTermIn) -> Result<pb::String> {
|
||||
Ok(replace_search_term(&input.search, &input.replacement)?.into())
|
||||
fn replace_search_node(&self, input: pb::ReplaceSearchNodeIn) -> Result<pb::String> {
|
||||
let existing = {
|
||||
let node = input.existing_node.unwrap_or_default().try_into()?;
|
||||
if let Node::Group(nodes) = node {
|
||||
nodes
|
||||
} else {
|
||||
vec![node]
|
||||
}
|
||||
};
|
||||
let replacement = input.replacement_node.unwrap_or_default().try_into()?;
|
||||
Ok(replace_search_node(existing, replacement).into())
|
||||
}
|
||||
|
||||
fn find_and_replace(&self, input: pb::FindAndReplaceIn) -> BackendResult<pb::UInt32> {
|
||||
|
|
|
@ -8,8 +8,7 @@ mod sqlwriter;
|
|||
mod writer;
|
||||
|
||||
pub use cards::SortMode;
|
||||
pub use parser::{Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind};
|
||||
pub use writer::{
|
||||
concatenate_searches, negate_search, normalize_search, replace_search_term, write_nodes,
|
||||
BoolSeparator,
|
||||
pub use parser::{
|
||||
parse as parse_search, Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind,
|
||||
};
|
||||
pub use writer::{concatenate_searches, replace_search_node, write_nodes, BoolSeparator};
|
||||
|
|
|
@ -17,7 +17,6 @@ use nom::{
|
|||
sequence::{preceded, separated_pair},
|
||||
};
|
||||
use regex::{Captures, Regex};
|
||||
use std::borrow::Cow;
|
||||
|
||||
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
|
||||
type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
|
||||
|
@ -30,53 +29,63 @@ fn parse_error(input: &str) -> nom::Err<ParseError<'_>> {
|
|||
nom::Err::Error(ParseError::Anki(input, FailKind::Other(None)))
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum Node<'a> {
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum Node {
|
||||
And,
|
||||
Or,
|
||||
Not(Box<Node<'a>>),
|
||||
Group(Vec<Node<'a>>),
|
||||
Search(SearchNode<'a>),
|
||||
Not(Box<Node>),
|
||||
Group(Vec<Node>),
|
||||
Search(SearchNode),
|
||||
}
|
||||
|
||||
impl Node {
|
||||
pub fn negated(self) -> Node {
|
||||
if let Node::Not(inner) = self {
|
||||
*inner
|
||||
} else {
|
||||
Node::Not(Box::new(self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum SearchNode<'a> {
|
||||
pub enum SearchNode {
|
||||
// text without a colon
|
||||
UnqualifiedText(Cow<'a, str>),
|
||||
UnqualifiedText(String),
|
||||
// foo:bar, where foo doesn't match a term below
|
||||
SingleField {
|
||||
field: Cow<'a, str>,
|
||||
text: Cow<'a, str>,
|
||||
field: String,
|
||||
text: String,
|
||||
is_re: bool,
|
||||
},
|
||||
AddedInDays(u32),
|
||||
EditedInDays(u32),
|
||||
CardTemplate(TemplateKind<'a>),
|
||||
Deck(Cow<'a, str>),
|
||||
CardTemplate(TemplateKind),
|
||||
Deck(String),
|
||||
DeckID(DeckID),
|
||||
NoteTypeID(NoteTypeID),
|
||||
NoteType(Cow<'a, str>),
|
||||
NoteType(String),
|
||||
Rated {
|
||||
days: u32,
|
||||
ease: RatingKind,
|
||||
},
|
||||
Tag(Cow<'a, str>),
|
||||
Tag(String),
|
||||
Duplicates {
|
||||
note_type_id: NoteTypeID,
|
||||
text: Cow<'a, str>,
|
||||
text: String,
|
||||
},
|
||||
State(StateKind),
|
||||
Flag(u8),
|
||||
NoteIDs(Cow<'a, str>),
|
||||
CardIDs(&'a str),
|
||||
NoteIDs(String),
|
||||
CardIDs(String),
|
||||
Property {
|
||||
operator: String,
|
||||
kind: PropertyKind,
|
||||
},
|
||||
WholeCollection,
|
||||
Regex(Cow<'a, str>),
|
||||
NoCombining(Cow<'a, str>),
|
||||
WordBoundary(Cow<'a, str>),
|
||||
Regex(String),
|
||||
NoCombining(String),
|
||||
WordBoundary(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
|
@ -103,9 +112,9 @@ pub enum StateKind {
|
|||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum TemplateKind<'a> {
|
||||
pub enum TemplateKind {
|
||||
Ordinal(u16),
|
||||
Name(Cow<'a, str>),
|
||||
Name(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
|
@ -116,7 +125,7 @@ pub enum RatingKind {
|
|||
}
|
||||
|
||||
/// Parse the input string into a list of nodes.
|
||||
pub(super) fn parse(input: &str) -> Result<Vec<Node>> {
|
||||
pub fn parse(input: &str) -> Result<Vec<Node>> {
|
||||
let input = input.trim();
|
||||
if input.is_empty() {
|
||||
return Ok(vec![Node::Search(SearchNode::WholeCollection)]);
|
||||
|
@ -303,7 +312,7 @@ fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
|
|||
fn search_node_for_text_with_argument<'a>(
|
||||
key: &'a str,
|
||||
val: &'a str,
|
||||
) -> ParseResult<'a, SearchNode<'a>> {
|
||||
) -> ParseResult<'a, SearchNode> {
|
||||
Ok(match key.to_ascii_lowercase().as_str() {
|
||||
"deck" => SearchNode::Deck(unescape(val)?),
|
||||
"note" => SearchNode::NoteType(unescape(val)?),
|
||||
|
@ -319,7 +328,7 @@ fn search_node_for_text_with_argument<'a>(
|
|||
"did" => parse_did(val)?,
|
||||
"mid" => parse_mid(val)?,
|
||||
"nid" => SearchNode::NoteIDs(check_id_list(val, key)?.into()),
|
||||
"cid" => SearchNode::CardIDs(check_id_list(val, key)?),
|
||||
"cid" => SearchNode::CardIDs(check_id_list(val, key)?.into()),
|
||||
"re" => SearchNode::Regex(unescape_quotes(val)),
|
||||
"nc" => SearchNode::NoCombining(unescape(val)?),
|
||||
"w" => SearchNode::WordBoundary(unescape(val)?),
|
||||
|
@ -579,7 +588,7 @@ fn parse_dupe(s: &str) -> ParseResult<SearchNode> {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchNode<'a>> {
|
||||
fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchNode> {
|
||||
Ok(if let Some(stripped) = val.strip_prefix("re:") {
|
||||
SearchNode::SingleField {
|
||||
field: unescape(key)?,
|
||||
|
@ -596,25 +605,25 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
|
|||
}
|
||||
|
||||
/// For strings without unescaped ", convert \" to "
|
||||
fn unescape_quotes(s: &str) -> Cow<str> {
|
||||
fn unescape_quotes(s: &str) -> String {
|
||||
if s.contains('"') {
|
||||
s.replace(r#"\""#, "\"").into()
|
||||
s.replace(r#"\""#, "\"")
|
||||
} else {
|
||||
s.into()
|
||||
}
|
||||
}
|
||||
|
||||
/// For non-globs like dupe text without any assumption about the content
|
||||
fn unescape_quotes_and_backslashes(s: &str) -> Cow<str> {
|
||||
fn unescape_quotes_and_backslashes(s: &str) -> String {
|
||||
if s.contains('"') || s.contains('\\') {
|
||||
s.replace(r#"\""#, "\"").replace(r"\\", r"\").into()
|
||||
s.replace(r#"\""#, "\"").replace(r"\\", r"\")
|
||||
} else {
|
||||
s.into()
|
||||
}
|
||||
}
|
||||
|
||||
/// Unescape chars with special meaning to the parser.
|
||||
fn unescape(txt: &str) -> ParseResult<Cow<str>> {
|
||||
fn unescape(txt: &str) -> ParseResult<String> {
|
||||
if let Some(seq) = invalid_escape_sequence(txt) {
|
||||
Err(parse_failure(txt, FailKind::UnknownEscape(seq)))
|
||||
} else {
|
||||
|
@ -631,6 +640,7 @@ fn unescape(txt: &str) -> ParseResult<Cow<str>> {
|
|||
r"\-" => "-",
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.into()
|
||||
} else {
|
||||
txt.into()
|
||||
})
|
||||
|
@ -980,4 +990,14 @@ mod test {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn negating() {
|
||||
let node = Node::Search(SearchNode::UnqualifiedText("foo".to_string()));
|
||||
let neg_node = Node::Not(Box::new(Node::Search(SearchNode::UnqualifiedText(
|
||||
"foo".to_string(),
|
||||
))));
|
||||
assert_eq!(node.clone().negated(), neg_node);
|
||||
assert_eq!(node.clone().negated().negated(), node);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -134,7 +134,9 @@ impl SqlWriter<'_> {
|
|||
SearchNode::EditedInDays(days) => self.write_edited(*days)?,
|
||||
SearchNode::CardTemplate(template) => match template {
|
||||
TemplateKind::Ordinal(_) => self.write_template(template)?,
|
||||
TemplateKind::Name(name) => self.write_template(&TemplateKind::Name(norm(name)))?,
|
||||
TemplateKind::Name(name) => {
|
||||
self.write_template(&TemplateKind::Name(norm(name).into()))?
|
||||
}
|
||||
},
|
||||
SearchNode::Deck(deck) => self.write_deck(&norm(deck))?,
|
||||
SearchNode::NoteTypeID(ntid) => {
|
||||
|
@ -532,7 +534,7 @@ impl RequiredTable {
|
|||
}
|
||||
}
|
||||
|
||||
impl Node<'_> {
|
||||
impl Node {
|
||||
fn required_table(&self) -> RequiredTable {
|
||||
match self {
|
||||
Node::And => RequiredTable::CardsOrNotes,
|
||||
|
@ -546,7 +548,7 @@ impl Node<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
impl SearchNode<'_> {
|
||||
impl SearchNode {
|
||||
fn required_table(&self) -> RequiredTable {
|
||||
match self {
|
||||
SearchNode::AddedInDays(_) => RequiredTable::Cards,
|
||||
|
|
|
@ -3,11 +3,9 @@
|
|||
|
||||
use crate::{
|
||||
decks::DeckID as DeckIDType,
|
||||
err::Result,
|
||||
notetype::NoteTypeID as NoteTypeIDType,
|
||||
search::parser::{parse, Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind},
|
||||
search::parser::{Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use std::mem;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
|
@ -16,59 +14,33 @@ pub enum BoolSeparator {
Or,
}

/// Take an Anki-style search string and convert it into an equivalent
/// search string with normalized syntax.
pub fn normalize_search(input: &str) -> Result<String> {
Ok(write_nodes(&parse(input)?))
/// Take an existing search, and AND/OR it with the provided additional search.
/// This is required because when the user has "a AND b" in an existing search and
/// wants to add "c", we want "a AND b AND c", not "(a AND b) AND C", which is what we'd
/// get if we tried to join the existing search string with a new SearchTerm on the
/// client side.
pub fn concatenate_searches(
sep: BoolSeparator,
mut existing: Vec<Node>,
additional: Node,
) -> String {
if !existing.is_empty() {
existing.push(match sep {
BoolSeparator::And => Node::And,
BoolSeparator::Or => Node::Or,
});
}
existing.push(additional);
write_nodes(&existing)
}

/// Take an Anki-style search string and return the negated counterpart.
/// Empty searches (whole collection) remain unchanged.
pub fn negate_search(input: &str) -> Result<String> {
let mut nodes = parse(input)?;
use Node::*;
Ok(if nodes.len() == 1 {
let node = nodes.remove(0);
match node {
Not(n) => write_node(&n),
Search(SearchNode::WholeCollection) => "".to_string(),
Group(_) | Search(_) => write_node(&Not(Box::new(node))),
_ => unreachable!(),
}
} else {
write_node(&Not(Box::new(Group(nodes))))
})
}

/// Take arbitrary Anki-style search strings and return their concatenation where they
/// are separated by the provided boolean operator.
/// Empty searches (whole collection) are left out.
pub fn concatenate_searches(sep: BoolSeparator, searches: &[String]) -> Result<String> {
let bool_node = vec![match sep {
BoolSeparator::And => Node::And,
BoolSeparator::Or => Node::Or,
}];
Ok(write_nodes(
searches
.iter()
.map(|s| parse(s))
.collect::<Result<Vec<Vec<Node>>>>()?
.iter()
.filter(|v| v[0] != Node::Search(SearchNode::WholeCollection))
.intersperse(&&bool_node)
.flat_map(|v| v.iter()),
))
}

/// Take two Anki-style search strings. If the second one evaluates to a single search
/// node, replace with it all search terms of the same kind in the first search.
/// Then return the possibly modified first search.
pub fn replace_search_term(search: &str, replacement: &str) -> Result<String> {
let mut nodes = parse(search)?;
let new = parse(replacement)?;
if let [Node::Search(search_node)] = &new[..] {
fn update_node_vec<'a>(old_nodes: &mut [Node<'a>], new_node: &SearchNode<'a>) {
fn update_node<'a>(old_node: &mut Node<'a>, new_node: &SearchNode<'a>) {
/// Given an existing parsed search, if the provided `replacement` is a single search node such
/// as a deck:xxx search, replace any instances of that search in `existing` with the new value.
/// Then return the possibly modified first search as a string.
pub fn replace_search_node(mut existing: Vec<Node>, replacement: Node) -> String {
if let Node::Search(search_node) = replacement {
fn update_node_vec(old_nodes: &mut [Node], new_node: &SearchNode) {
fn update_node(old_node: &mut Node, new_node: &SearchNode) {
match old_node {
Node::Not(n) => update_node(n, new_node),
Node::Group(ns) => update_node_vec(ns, new_node),
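The flattening behaviour described in the comment on concatenate_searches is what the pylib wrapper relies on; a brief sketch (assumed open collection `col`, output strings illustrative):

prev = SearchNode(parsable_text="a b")                   # parses to: "a" AND "b"
extra = col.group_searches(SearchNode(parsable_text="c"))
col.join_searches(prev, extra, "AND")                    # -> "a" AND "b" AND "c",
                                                         # not ("a" AND "b") AND "c"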
|
@ -82,16 +54,13 @@ pub fn replace_search_term(search: &str, replacement: &str) -> Result<String> {
|
|||
}
|
||||
old_nodes.iter_mut().for_each(|n| update_node(n, new_node));
|
||||
}
|
||||
update_node_vec(&mut nodes, search_node);
|
||||
update_node_vec(&mut existing, &search_node);
|
||||
}
|
||||
Ok(write_nodes(&nodes))
|
||||
write_nodes(&existing)
|
||||
}
|
||||
|
||||
pub fn write_nodes<'a, I>(nodes: I) -> String
|
||||
where
|
||||
I: IntoIterator<Item = &'a Node<'a>>,
|
||||
{
|
||||
nodes.into_iter().map(|node| write_node(node)).collect()
|
||||
pub fn write_nodes(nodes: &[Node]) -> String {
|
||||
nodes.iter().map(|node| write_node(node)).collect()
|
||||
}
|
||||
|
||||
fn write_node(node: &Node) -> String {
|
||||
|
@ -125,7 +94,7 @@ fn write_search_node(node: &SearchNode) -> String {
|
|||
NoteIDs(s) => format!("\"nid:{}\"", s),
|
||||
CardIDs(s) => format!("\"cid:{}\"", s),
|
||||
Property { operator, kind } => write_property(operator, kind),
|
||||
WholeCollection => "".to_string(),
|
||||
WholeCollection => "\"deck:*\"".to_string(),
|
||||
Regex(s) => quote(&format!("re:{}", s)),
|
||||
NoCombining(s) => quote(&format!("nc:{}", s)),
|
||||
WordBoundary(s) => quote(&format!("w:{}", s)),
|
||||
|
@ -206,6 +175,14 @@ fn write_property(operator: &str, kind: &PropertyKind) -> String {
|
|||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::err::Result;
|
||||
use crate::search::parse_search as parse;
|
||||
|
||||
/// Take an Anki-style search string and convert it into an equivalent
|
||||
/// search string with normalized syntax.
|
||||
fn normalize_search(input: &str) -> Result<String> {
|
||||
Ok(write_nodes(&parse(input)?))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalizing() -> Result<()> {
|
||||
|
@ -224,36 +201,40 @@ mod test {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn negating() -> Result<()> {
|
||||
assert_eq!(r#"-("foo" AND "bar")"#, negate_search("foo bar").unwrap());
|
||||
assert_eq!(r#""foo""#, negate_search("-foo").unwrap());
|
||||
assert_eq!(r#"("foo")"#, negate_search("-(foo)").unwrap());
|
||||
assert_eq!("", negate_search("").unwrap());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn concatenating() -> Result<()> {
|
||||
assert_eq!(
|
||||
concatenate_searches(
|
||||
BoolSeparator::And,
|
||||
vec![Node::Search(SearchNode::UnqualifiedText("foo".to_string()))],
|
||||
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
|
||||
),
|
||||
r#""foo" AND "bar""#,
|
||||
concatenate_searches(BoolSeparator::And, &["foo".to_string(), "bar".to_string()])
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
r#""foo" OR "bar""#,
|
||||
concatenate_searches(
|
||||
BoolSeparator::Or,
|
||||
&["foo".to_string(), "".to_string(), "bar".to_string()]
|
||||
)
|
||||
.unwrap()
|
||||
vec![Node::Search(SearchNode::UnqualifiedText("foo".to_string()))],
|
||||
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
|
||||
),
|
||||
r#""foo" OR "bar""#,
|
||||
);
|
||||
assert_eq!(
|
||||
"",
|
||||
concatenate_searches(BoolSeparator::Or, &["".to_string()]).unwrap()
|
||||
concatenate_searches(
|
||||
BoolSeparator::Or,
|
||||
vec![Node::Search(SearchNode::WholeCollection)],
|
||||
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
|
||||
),
|
||||
r#""deck:*" OR "bar""#,
|
||||
);
|
||||
assert_eq!(
|
||||
concatenate_searches(
|
||||
BoolSeparator::Or,
|
||||
vec![],
|
||||
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
|
||||
),
|
||||
r#""bar""#,
|
||||
);
|
||||
assert_eq!("", concatenate_searches(BoolSeparator::Or, &[]).unwrap());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -261,24 +242,30 @@ mod test {
|
|||
#[test]
|
||||
fn replacing() -> Result<()> {
|
||||
assert_eq!(
|
||||
replace_search_node(parse("deck:baz bar")?, parse("deck:foo")?.pop().unwrap()),
|
||||
r#""deck:foo" AND "bar""#,
|
||||
replace_search_term("deck:baz bar", "deck:foo").unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
replace_search_node(
|
||||
parse("tag:foo Or tag:bar")?,
|
||||
parse("tag:baz")?.pop().unwrap()
|
||||
),
|
||||
r#""tag:baz" OR "tag:baz""#,
|
||||
replace_search_term("tag:foo Or tag:bar", "tag:baz").unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
replace_search_node(
|
||||
parse("foo or (-foo tag:baz)")?,
|
||||
parse("bar")?.pop().unwrap()
|
||||
),
|
||||
r#""bar" OR (-"bar" AND "tag:baz")"#,
|
||||
replace_search_term("foo or (-foo tag:baz)", "bar").unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
r#""is:due""#,
|
||||
replace_search_term("is:due", "-is:new").unwrap()
|
||||
replace_search_node(parse("is:due")?, parse("-is:new")?.pop().unwrap()),
|
||||
r#""is:due""#
|
||||
);
|
||||
assert_eq!(
|
||||
r#""added:1""#,
|
||||
replace_search_term("added:1", "is:due").unwrap()
|
||||
replace_search_node(parse("added:1")?, parse("is:due")?.pop().unwrap()),
|
||||
r#""added:1""#
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -336,11 +336,11 @@ pub(crate) fn to_text(txt: &str) -> Cow<str> {
|
|||
}
|
||||
|
||||
/// Escape Anki wildcards and the backslash for escaping them: \*_
|
||||
pub(crate) fn escape_anki_wildcards(txt: &str) -> Cow<str> {
|
||||
pub(crate) fn escape_anki_wildcards(txt: &str) -> String {
|
||||
lazy_static! {
|
||||
static ref RE: Regex = Regex::new(r"[\\*_]").unwrap();
|
||||
}
|
||||
RE.replace_all(&txt, r"\$0")
|
||||
RE.replace_all(&txt, r"\$0").into()
|
||||
}
|
||||
|
||||
/// Compare text with a possible glob, folding case.
|
||||
|
|