Mirror of https://github.com/ankitects/anki.git (synced 2025-09-18 22:12:21 -04:00)
Commit 318cc01c73
20 changed files with 636 additions and 163 deletions
@@ -25,6 +25,7 @@ actions-red-flag = Red Flag
 actions-rename = Rename
 actions-rename-deck = Rename Deck
 actions-rename-tag = Rename Tag
+actions-remove-tag = Remove Tag
 actions-replay-audio = Replay Audio
 actions-reposition = Reposition
 actions-save = Save
@@ -42,7 +42,8 @@ SchedTimingToday = pb.SchedTimingTodayOut
 BuiltinSortKind = pb.BuiltinSearchOrder.BuiltinSortKind
 BackendCard = pb.Card
 BackendNote = pb.Note
-TagUsnTuple = pb.TagUsnTuple
+Tag = pb.Tag
+TagTreeNode = pb.TagTreeNode
 NoteType = pb.NoteType
 DeckTreeNode = pb.DeckTreeNode
 StockNoteType = pb.StockNoteType
@@ -16,6 +16,7 @@ import re
 from typing import Collection, List, Optional, Sequence, Tuple
 
 import anki  # pylint: disable=unused-import
+from anki.rsbackend import FilterToSearchIn
 from anki.utils import ids2str
 
 
@@ -25,7 +26,7 @@ class TagManager:
 
     # all tags
     def all(self) -> List[str]:
-        return [t.tag for t in self.col.backend.all_tags()]
+        return [t.name for t in self.col.backend.all_tags()]
 
     def __repr__(self) -> str:
         d = dict(self.__dict__)
@@ -34,7 +35,7 @@ class TagManager:
 
     # # List of (tag, usn)
     def allItems(self) -> List[Tuple[str, int]]:
-        return [(t.tag, t.usn) for t in self.col.backend.all_tags()]
+        return [(t.name, t.usn) for t in self.col.backend.all_tags()]
 
     # Registering and fetching tags
     #############################################################
@@ -42,34 +43,14 @@ class TagManager:
     def register(
         self, tags: Collection[str], usn: Optional[int] = None, clear=False
     ) -> None:
-        if usn is None:
-            preserve_usn = False
-            usn_ = 0
-        else:
-            usn_ = usn
-            preserve_usn = True
-
-        self.col.backend.register_tags(
-            tags=" ".join(tags), preserve_usn=preserve_usn, usn=usn_, clear_first=clear
-        )
+        print("tags.register() is deprecated and no longer works")
 
     def registerNotes(self, nids: Optional[List[int]] = None) -> None:
-        "Add any missing tags from notes to the tags list."
-        # when called without an argument, the old list is cleared first.
-        if nids:
-            lim = " where id in " + ids2str(nids)
-            clear = False
-        else:
-            lim = ""
-            clear = True
-        self.register(
-            set(
-                self.split(
-                    " ".join(self.col.db.list("select distinct tags from notes" + lim))
-                )
-            ),
-            clear=clear,
-        )
+        "Clear unused tags and add any missing tags from notes to the tag list."
+        self.clear_unused_tags()
+
+    def clear_unused_tags(self):
+        self.col.backend.clear_unused_tags()
 
     def byDeck(self, did, children=False) -> List[str]:
         basequery = "select n.tags from cards c, notes n WHERE c.nid = n.id"
@@ -84,6 +65,10 @@ class TagManager:
         res = self.col.db.list(query)
         return list(set(self.split(" ".join(res))))
 
+    def set_collapsed(self, tag: str, collapsed: bool):
+        "Set browser collapse state for tag, registering the tag if missing."
+        self.col.backend.set_tag_collapsed(name=tag, collapsed=collapsed)
+
     # Bulk addition/removal from notes
     #############################################################
 
@@ -102,12 +87,12 @@ class TagManager:
 
     def rename_tag(self, old: str, new: str) -> int:
         "Rename provided tag, returning number of changed notes."
-        escaped_name = re.sub(r"[*_\\]", r"\\\g<0>", old)
-        escaped_name = '"{}"'.format(escaped_name.replace('"', '\\"'))
-        nids = self.col.find_notes("tag:" + escaped_name)
+        search = self.col.backend.filter_to_search(FilterToSearchIn(tag=old))
+        nids = self.col.find_notes(search)
         if not nids:
             return 0
-        return self.col.tags.bulk_update(nids, old, new, False)
+        escaped_name = re.sub(r"[*_\\]", r"\\\g<0>", old)
+        return self.bulk_update(nids, escaped_name, new, False)
 
     # legacy routines
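Taken together, this hunk replaces the old register()/registerNotes() bookkeeping with thin wrappers around backend calls. A minimal usage sketch of the reworked API (not part of the diff; it assumes `col` is an already-open collection object):

# Not part of the diff: usage sketch of the reworked TagManager API.
col.tags.clear_unused_tags()                    # drop tags no longer on any note
col.tags.set_collapsed("parent::child", True)   # remember sidebar collapse state
changed = col.tags.rename_tag("old", "new")     # number of notes touched
print(f"{changed} notes updated")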
@@ -30,6 +30,7 @@ from anki.rsbackend import (
     FilterToSearchIn,
     InvalidInput,
     NamedFilter,
+    TagTreeNode,
 )
 from anki.stats import CardStats
 from anki.utils import htmlToTextLine, ids2str, isMac, isWin
@@ -468,8 +469,12 @@ class SidebarItem:
         expanded: bool = False,
         item_type: SidebarItemType = SidebarItemType.CUSTOM,
         id: int = 0,
+        full_name: str = None,
     ) -> None:
         self.name = name
+        if not full_name:
+            full_name = name
+        self.full_name = full_name
         self.icon = icon
         self.item_type = item_type
         self.id = id
@@ -1166,15 +1171,31 @@ QTableView {{ gridline-color: {grid} }}
             root.addChild(item)
 
     def _userTagTree(self, root) -> None:
-        assert self.col
-        for t in self.col.tags.all():
-            item = SidebarItem(
-                t,
-                ":/icons/tag.svg",
-                self._tag_filter(t),
-                item_type=SidebarItemType.TAG,
-            )
-            root.addChild(item)
+        tree = self.col.backend.tag_tree()
+
+        def fillGroups(root, nodes: Sequence[TagTreeNode], head=""):
+            for node in nodes:
+
+                def toggle_expand():
+                    full_name = head + node.name  # pylint: disable=cell-var-from-loop
+                    return lambda expanded: self.mw.col.tags.set_collapsed(
+                        full_name, not expanded
+                    )
+
+                item = SidebarItem(
+                    node.name,
+                    ":/icons/tag.svg",
+                    self._tag_filter(head + node.name),
+                    toggle_expand(),
+                    not node.collapsed,
+                    item_type=SidebarItemType.TAG,
+                    full_name=head + node.name,
+                )
+                root.addChild(item)
+                newhead = head + node.name + "::"
+                fillGroups(item, node.children, newhead)
+
+        fillGroups(root, tree.children)
 
     def _decksTree(self, root) -> None:
         tree = self.col.decks.deck_tree()
@@ -75,6 +75,7 @@ class ResetReason(enum.Enum):
     EditorBridgeCmd = "editorBridgeCmd"
     BrowserSetDeck = "browserSetDeck"
     BrowserAddTags = "browserAddTags"
+    BrowserRemoveTags = "browserRemoveTags"
     BrowserSuspend = "browserSuspend"
     BrowserReposition = "browserReposition"
     BrowserReschedule = "browserReschedule"
@@ -76,7 +76,10 @@ class NewSidebarTreeView(SidebarTreeViewBase):
                 (tr(TR.ACTIONS_RENAME), self.rename_deck),
                 (tr(TR.ACTIONS_DELETE), self.delete_deck),
             ),
-            SidebarItemType.TAG: ((tr(TR.ACTIONS_RENAME), self.rename_tag),),
+            SidebarItemType.TAG: (
+                (tr(TR.ACTIONS_RENAME), self.rename_tag),
+                (tr(TR.ACTIONS_DELETE), self.remove_tag),
+            ),
         }
 
     def onContextMenu(self, point: QPoint) -> None:
@@ -111,16 +114,37 @@ class NewSidebarTreeView(SidebarTreeViewBase):
         self.browser.maybeRefreshSidebar()
         self.mw.deckBrowser.refresh()
 
+    def remove_tag(self, item: "aqt.browser.SidebarItem") -> None:
+        self.browser.editor.saveNow(lambda: self._remove_tag(item))
+
+    def _remove_tag(self, item: "aqt.browser.SidebarItem") -> None:
+        old_name = item.full_name
+
+        def do_remove():
+            self.mw.col.backend.clear_tag(old_name)
+            self.col.tags.rename_tag(old_name, "")
+
+        def on_done(fut: Future):
+            self.mw.requireReset(reason=ResetReason.BrowserRemoveTags, context=self)
+            self.browser.model.endReset()
+            fut.result()
+            self.browser.maybeRefreshSidebar()
+
+        self.mw.checkpoint(tr(TR.ACTIONS_REMOVE_TAG))
+        self.browser.model.beginReset()
+        self.mw.taskman.run_in_background(do_remove, on_done)
+
     def rename_tag(self, item: "aqt.browser.SidebarItem") -> None:
         self.browser.editor.saveNow(lambda: self._rename_tag(item))
 
     def _rename_tag(self, item: "aqt.browser.SidebarItem") -> None:
-        old_name = item.name
+        old_name = item.full_name
         new_name = getOnlyText(tr(TR.ACTIONS_NEW_NAME), default=old_name)
         if new_name == old_name or not new_name:
             return
 
        def do_rename():
+            self.mw.col.backend.clear_tag(old_name)
            return self.col.tags.rename_tag(old_name, new_name)
 
        def on_done(fut: Future):
@@ -132,7 +156,7 @@ class NewSidebarTreeView(SidebarTreeViewBase):
                 showInfo(tr(TR.BROWSING_TAG_RENAME_WARNING_EMPTY))
                 return
 
-            self.browser.clearUnusedTags()
+            self.browser.maybeRefreshSidebar()
 
         self.mw.checkpoint(tr(TR.ACTIONS_RENAME_TAG))
         self.browser.model.beginReset()
@@ -206,8 +206,11 @@ service BackendService {
 
   // tags
 
-  rpc RegisterTags(RegisterTagsIn) returns (Bool);
+  rpc ClearUnusedTags(Empty) returns (Empty);
   rpc AllTags(Empty) returns (AllTagsOut);
+  rpc SetTagCollapsed(SetTagCollapsedIn) returns (Empty);
+  rpc ClearTag(String) returns (Empty);
+  rpc TagTree(Empty) returns (TagTreeNode);
 
   // config/preferences
 
@@ -812,26 +815,32 @@ message AddOrUpdateDeckConfigLegacyIn {
   bool preserve_usn_and_mtime = 2;
 }
 
-message RegisterTagsIn {
-  string tags = 1;
-  bool preserve_usn = 2;
-  int32 usn = 3;
-  bool clear_first = 4;
-}
-
 message AllTagsOut {
-  repeated TagUsnTuple tags = 1;
+  repeated Tag tags = 1;
 }
 
-message TagUsnTuple {
-  string tag = 1;
+message SetTagCollapsedIn {
+  string name = 1;
+  bool collapsed = 2;
+}
+
+message Tag {
+  string name = 1;
   sint32 usn = 2;
+  bool collapsed = 3;
 }
 
 message GetChangedTagsOut {
   repeated string tags = 1;
 }
 
+message TagTreeNode {
+  string name = 1;
+  repeated TagTreeNode children = 2;
+  uint32 level = 3;
+  bool collapsed = 4;
+}
+
 message SetConfigJsonIn {
   string key = 1;
   bytes value_json = 2;
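The proto hunks above drop RegisterTags in favour of finer-grained tag RPCs. From Python they are reached through the generated backend wrapper, mirroring the calls used elsewhere in this diff (a hedged sketch, assuming `col` is an open collection):

# Not part of the diff: the new tag RPCs as seen from the Python wrapper.
tree = col.backend.tag_tree()                    # nested TagTreeNode structure
col.backend.set_tag_collapsed(name="foo::bar", collapsed=True)
col.backend.clear_tag("foo")                     # also removes children such as "foo::bar"
col.backend.clear_unused_tags()
for node in tree.children:
    print(node.name, node.level, node.collapsed)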
@@ -1339,28 +1339,43 @@ impl BackendService for Backend {
     //-------------------------------------------------------------------
 
     fn all_tags(&self, _input: Empty) -> BackendResult<pb::AllTagsOut> {
-        let tags = self.with_col(|col| col.storage.all_tags())?;
-        let tags: Vec<_> = tags
-            .into_iter()
-            .map(|(tag, usn)| pb::TagUsnTuple { tag, usn: usn.0 })
-            .collect();
+        let tags: Vec<pb::Tag> = self.with_col(|col| {
+            Ok(col
+                .storage
+                .all_tags()?
+                .into_iter()
+                .map(|t| t.into())
+                .collect())
+        })?;
         Ok(pb::AllTagsOut { tags })
     }
 
-    fn register_tags(&self, input: pb::RegisterTagsIn) -> BackendResult<pb::Bool> {
+    fn set_tag_collapsed(&self, input: pb::SetTagCollapsedIn) -> BackendResult<pb::Empty> {
         self.with_col(|col| {
             col.transact(None, |col| {
-                let usn = if input.preserve_usn {
-                    Usn(input.usn)
-                } else {
-                    col.usn()?
-                };
-                col.register_tags(&input.tags, usn, input.clear_first)
-                    .map(|val| pb::Bool { val })
+                col.set_tag_collapsed(&input.name, input.collapsed)?;
+                Ok(().into())
             })
         })
     }
 
+    fn clear_unused_tags(&self, _input: pb::Empty) -> BackendResult<pb::Empty> {
+        self.with_col(|col| col.transact(None, |col| col.clear_unused_tags().map(Into::into)))
+    }
+
+    fn clear_tag(&self, tag: pb::String) -> BackendResult<pb::Empty> {
+        self.with_col(|col| {
+            col.transact(None, |col| {
+                col.storage.clear_tag(tag.val.as_str())?;
+                Ok(().into())
+            })
+        })
+    }
+
+    fn tag_tree(&self, _input: Empty) -> Result<pb::TagTreeNode> {
+        self.with_col(|col| col.tag_tree())
+    }
+
     // config/preferences
     //-------------------------------------------------------------------
@@ -242,7 +242,7 @@ impl Collection {
         let usn = self.usn()?;
         let stamp = TimestampMillis::now();
 
-        // will rebuild tag list below
+        let collapsed_tags = self.storage.collapsed_tags()?;
         self.storage.clear_tags()?;
 
         let total_notes = self.storage.total_notes()?;
@@ -294,6 +294,10 @@
             }
         }
 
+        // the note rebuilding process took care of adding tags back, so we just need
+        // to ensure to restore the collapse state
+        self.storage.restore_collapsed_tags(&collapsed_tags)?;
+
         // if the collection is empty and the user has deleted all note types, ensure at least
         // one note type exists
         if self.storage.get_all_notetype_names()?.is_empty() {
@@ -632,4 +636,23 @@ mod test {
 
         Ok(())
     }
+
+    #[test]
+    fn tags() -> Result<()> {
+        let mut col = open_test_collection();
+        let nt = col.get_notetype_by_name("Basic")?.unwrap();
+        let mut note = nt.new_note();
+        note.tags.push("one".into());
+        note.tags.push("two".into());
+        col.add_note(&mut note, DeckID(1))?;
+
+        col.set_tag_collapsed("two", true)?;
+
+        col.check_database(progress_fn)?;
+
+        assert_eq!(col.storage.get_tag("one")?.unwrap().collapsed, false);
+        assert_eq!(col.storage.get_tag("two")?.unwrap().collapsed, true);
+
+        Ok(())
+    }
 }
@@ -155,7 +155,22 @@ impl Note {
     pub(crate) fn replace_tags<T: Replacer>(&mut self, re: &Regex, mut repl: T) -> bool {
         let mut changed = false;
         for tag in &mut self.tags {
-            if let Cow::Owned(rep) = re.replace_all(tag, repl.by_ref()) {
+            if let Cow::Owned(rep) = re.replace_all(tag, |caps: &regex::Captures| {
+                if let Some(expanded) = repl.by_ref().no_expansion() {
+                    if expanded.trim().is_empty() {
+                        "".to_string()
+                    } else {
+                        // include "::" if it was matched
+                        format!(
+                            "{}{}",
+                            expanded,
+                            caps.get(caps.len() - 1).map_or("", |m| m.as_str())
+                        )
+                    }
+                } else {
+                    tag.to_string()
+                }
+            }) {
                 *tag = rep;
                 changed = true;
             }
@@ -11,7 +11,7 @@ use crate::{
     notetype::NoteTypeID,
     storage::ids_to_string,
     text::{
-        escape_sql, is_glob, matches_glob, normalize_to_nfc, strip_html_preserving_media_filenames,
+        is_glob, matches_glob, normalize_to_nfc, strip_html_preserving_media_filenames,
         to_custom_re, to_re, to_sql, to_text, without_combining,
     },
     timestamp::TimestampSecs,
@@ -194,19 +194,16 @@ impl SqlWriter<'_> {
             write!(self.sql, "false").unwrap();
         } else {
             match text {
-                "none" => write!(self.sql, "n.tags = ''").unwrap(),
-                "*" => write!(self.sql, "true").unwrap(),
-                s => {
-                    if is_glob(s) {
-                        write!(self.sql, "n.tags regexp ?").unwrap();
-                        let re = &to_custom_re(s, r"\S");
-                        self.args.push(format!("(?i).* {} .*", re));
-                    } else if let Some(tag) = self.col.storage.preferred_tag_case(&to_text(s))? {
-                        write!(self.sql, "n.tags like ? escape '\\'").unwrap();
-                        self.args.push(format!("% {} %", escape_sql(&tag)));
-                    } else {
-                        write!(self.sql, "false").unwrap();
-                    }
-                }
+                "none" => {
+                    write!(self.sql, "n.tags = ''").unwrap();
+                }
+                "*" => {
+                    write!(self.sql, "true").unwrap();
+                }
+                text => {
+                    write!(self.sql, "n.tags regexp ?").unwrap();
+                    let re = &to_custom_re(text, r"\S");
+                    self.args.push(format!("(?i).* {}(::| ).*", re));
+                }
             }
         }
@@ -587,7 +584,6 @@ mod test {
         collection::{open_collection, Collection},
         i18n::I18n,
         log,
-        types::Usn,
     };
     use std::{fs, path::PathBuf};
     use tempfile::tempdir;
@@ -698,26 +694,27 @@ mod test {
         // dupes
         assert_eq!(s(ctx, "dupe:123,test"), ("(n.id in ())".into(), vec![]));
 
-        // if registered, searches with canonical
-        ctx.transact(None, |col| col.register_tag("One", Usn(-1)))
-            .unwrap();
+        // tags
         assert_eq!(
             s(ctx, r"tag:one"),
             (
-                "(n.tags like ? escape '\\')".into(),
-                vec![r"% One %".into()]
+                "(n.tags regexp ?)".into(),
+                vec!["(?i).* one(::| ).*".into()]
+            )
+        );
+        assert_eq!(
+            s(ctx, r"tag:foo::bar"),
+            (
+                "(n.tags regexp ?)".into(),
+                vec!["(?i).* foo::bar(::| ).*".into()]
             )
         );
 
-        // unregistered tags without wildcards won't match
-        assert_eq!(s(ctx, "tag:unknown"), ("(false)".into(), vec![]));
-
-        // wildcards force a regexp search
         assert_eq!(
             s(ctx, r"tag:o*n\*et%w%oth_re\_e"),
             (
                 "(n.tags regexp ?)".into(),
-                vec![r"(?i).* o\S*n\*et%w%oth\Sre_e .*".into()]
+                vec![r"(?i).* o\S*n\*et%w%oth\Sre_e(::| ).*".into()]
             )
         );
         assert_eq!(s(ctx, "tag:none"), ("(n.tags = '')".into(), vec![]));
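The rewritten tag clause means a plain tag search now also matches child tags: per the updated test expectations above, "tag:one" compiles to the pattern (?i).* one(::| ).* run against the space-delimited tag string stored on each note. A small Python check of that pattern (illustrative only; SQLite applies it through the registered regexp function, not Python):

# Not part of the diff: demonstrating the pattern from the test expectations.
import re

pat = re.compile(r"(?i).* one(::| ).*")
print(bool(pat.match(" one ")))       # True  - exact tag
print(bool(pat.match(" one::two ")))  # True  - child tag now matches too
print(bool(pat.match(" one1 ")))      # False - unrelated tag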
@@ -1,6 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use std::collections::HashSet;
+
 use crate::{
     err::Result,
     notes::{Note, NoteID},
@@ -156,4 +158,20 @@ impl super::SqliteStorage {
             .query_row(NO_PARAMS, |r| r.get(0))
             .map_err(Into::into)
     }
+
+    pub(crate) fn all_tags_in_notes(&self) -> Result<HashSet<String>> {
+        let mut stmt = self
+            .db
+            .prepare_cached("select tags from notes where tags != ''")?;
+        let mut query = stmt.query(NO_PARAMS)?;
+        let mut seen: HashSet<String> = HashSet::new();
+        while let Some(rows) = query.next()? {
+            for tag in split_tags(rows.get_raw(0).as_str()?) {
+                if !seen.contains(tag) {
+                    seen.insert(tag.to_string());
+                }
+            }
+        }
+        Ok(seen)
+    }
 }
@@ -1,3 +1,3 @@
 INSERT
-OR IGNORE INTO tags (tag, usn)
-VALUES (?, ?)
+OR REPLACE INTO tags (tag, usn, collapsed)
+VALUES (?, ?, ?)
rslib/src/storage/tag/alloc_id.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+SELECT CASE
+  WHEN ?1 IN (
+    SELECT id
+    FROM tags
+  ) THEN (
+    SELECT max(id) + 1
+    FROM tags
+  )
+  ELSE ?1
+END;
@@ -2,24 +2,57 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use super::SqliteStorage;
-use crate::{err::Result, types::Usn};
-use rusqlite::{params, NO_PARAMS};
+use crate::{err::Result, tags::Tag, types::Usn};
+use rusqlite::{params, Row, NO_PARAMS};
 use std::collections::HashMap;
 
+fn row_to_tag(row: &Row) -> Result<Tag> {
+    Ok(Tag {
+        name: row.get(0)?,
+        usn: row.get(1)?,
+        collapsed: row.get(2)?,
+    })
+}
+
 impl SqliteStorage {
-    pub(crate) fn all_tags(&self) -> Result<Vec<(String, Usn)>> {
+    /// All tags in the collection, in alphabetical order.
+    pub(crate) fn all_tags(&self) -> Result<Vec<Tag>> {
         self.db
-            .prepare_cached("select tag, usn from tags")?
-            .query_and_then(NO_PARAMS, |row| -> Result<_> {
-                Ok((row.get(0)?, row.get(1)?))
-            })?
+            .prepare_cached("select tag, usn, collapsed from tags")?
+            .query_and_then(NO_PARAMS, row_to_tag)?
             .collect()
     }
 
-    pub(crate) fn register_tag(&self, tag: &str, usn: Usn) -> Result<()> {
+    pub(crate) fn collapsed_tags(&self) -> Result<Vec<String>> {
+        self.db
+            .prepare_cached("select tag from tags where collapsed = true")?
+            .query_and_then(NO_PARAMS, |r| r.get::<_, String>(0).map_err(Into::into))?
+            .collect::<Result<Vec<_>>>()
+    }
+
+    pub(crate) fn restore_collapsed_tags(&self, tags: &[String]) -> Result<()> {
+        let mut stmt = self
+            .db
+            .prepare_cached("update tags set collapsed = true where tag = ?")?;
+        for tag in tags {
+            stmt.execute(&[tag])?;
+        }
+        Ok(())
+    }
+
+    pub(crate) fn get_tag(&self, name: &str) -> Result<Option<Tag>> {
+        self.db
+            .prepare_cached("select tag, usn, collapsed from tags where tag = ?")?
+            .query_and_then(&[name], row_to_tag)?
+            .next()
+            .transpose()
+    }
+
+    pub(crate) fn register_tag(&self, tag: &Tag) -> Result<()> {
         self.db
             .prepare_cached(include_str!("add.sql"))?
-            .execute(params![tag, usn])?;
+            .execute(params![tag.name, tag.usn, tag.collapsed])?;
         Ok(())
     }
 
@@ -32,6 +65,22 @@ impl SqliteStorage {
             .map_err(Into::into)
     }
 
+    pub(crate) fn clear_tag(&self, tag: &str) -> Result<()> {
+        self.db
+            .prepare_cached("delete from tags where tag regexp ?")?
+            .execute(&[format!("(?i)^{}($|::)", regex::escape(tag))])?;
+
+        Ok(())
+    }
+
+    pub(crate) fn set_tag_collapsed(&self, tag: &str, collapsed: bool) -> Result<()> {
+        self.db
+            .prepare_cached("update tags set collapsed = ? where tag = ?")?
+            .execute(params![collapsed, tag])?;
+
+        Ok(())
+    }
+
     pub(crate) fn clear_tags(&self) -> Result<()> {
         self.db.execute("delete from tags", NO_PARAMS)?;
         Ok(())
@@ -75,8 +124,11 @@ impl SqliteStorage {
                 serde_json::from_str(row.get_raw(0).as_str()?).map_err(Into::into);
             tags
         })?;
+        let mut stmt = self
+            .db
+            .prepare_cached("insert or ignore into tags (tag, usn) values (?, ?)")?;
         for (tag, usn) in tags.into_iter() {
-            self.register_tag(&tag, usn)?;
+            stmt.execute(params![tag, usn])?;
         }
         self.db.execute_batch("update col set tags=''")?;
 
@@ -85,11 +137,23 @@ impl SqliteStorage {
 
     pub(super) fn downgrade_tags_from_schema14(&self) -> Result<()> {
         let alltags = self.all_tags()?;
-        let tagsmap: HashMap<String, Usn> = alltags.into_iter().collect();
+        let tagsmap: HashMap<String, Usn> = alltags.into_iter().map(|t| (t.name, t.usn)).collect();
         self.db.execute(
             "update col set tags=?",
             params![serde_json::to_string(&tagsmap)?],
         )?;
         Ok(())
     }
+
+    pub(super) fn upgrade_tags_to_schema17(&self) -> Result<()> {
+        let tags = self
+            .db
+            .prepare_cached("select tag, usn from tags")?
+            .query_and_then(NO_PARAMS, |r| Ok(Tag::new(r.get(0)?, r.get(1)?)))?
+            .collect::<Result<Vec<Tag>>>()?;
+        self.db
+            .execute_batch(include_str!["../upgrades/schema17_upgrade.sql"])?;
+        tags.into_iter()
+            .try_for_each(|tag| -> Result<()> { self.register_tag(&tag) })
+    }
 }
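clear_tag() above removes a tag together with all of its children in one statement by anchoring a case-insensitive regex at the start of the stored name, which is what the tags.rs test later in this diff exercises (clearing "a" also removes "a::b" and "A::b::c"). An illustrative Python equivalent of the pattern (the Rust code uses regex::escape; re.escape plays the same role here):

# Not part of the diff: the child-matching pattern used by clear_tag().
import re

pat = re.compile(r"(?i)^" + re.escape("a") + r"($|::)")
for name in ["a", "a::b", "A::b::c", "ab"]:
    print(name, bool(pat.match(name)))  # a/a::b/A::b::c -> True, ab -> False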
@@ -6,7 +6,7 @@ pub(super) const SCHEMA_MIN_VERSION: u8 = 11;
 /// The version new files are initially created with.
 pub(super) const SCHEMA_STARTING_VERSION: u8 = 11;
 /// The maximum schema version we can open.
-pub(super) const SCHEMA_MAX_VERSION: u8 = 16;
+pub(super) const SCHEMA_MAX_VERSION: u8 = 17;
 
 use super::SqliteStorage;
 use crate::err::Result;
@@ -31,6 +31,10 @@ impl SqliteStorage {
             self.upgrade_deck_conf_to_schema16(server)?;
             self.db.execute_batch("update col set ver = 16")?;
         }
+        if ver < 17 {
+            self.upgrade_tags_to_schema17()?;
+            self.db.execute_batch("update col set ver = 17")?;
+        }
 
         Ok(())
     }
rslib/src/storage/upgrades/schema17_upgrade.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+DROP TABLE tags;
+CREATE TABLE tags (
+  tag text NOT NULL PRIMARY KEY COLLATE unicase,
+  usn integer NOT NULL,
+  collapsed boolean NOT NULL,
+  config blob NULL
+) without rowid;
@@ -17,7 +17,7 @@ use crate::{
     revlog::RevlogEntry,
     serde::{default_on_invalid, deserialize_int_from_number},
     storage::open_and_check_sqlite_file,
-    tags::{join_tags, split_tags},
+    tags::{join_tags, split_tags, Tag},
 };
 pub use http_client::FullSyncProgressFn;
 use http_client::HTTPSyncClient;
@@ -898,7 +898,7 @@ impl Collection {
 
     fn merge_tags(&self, tags: Vec<String>, latest_usn: Usn) -> Result<()> {
         for tag in tags {
-            self.register_tag(&tag, latest_usn)?;
+            self.register_tag(Tag::new(tag, latest_usn))?;
         }
         Ok(())
     }
@@ -1461,12 +1461,12 @@ mod test {
         col1.storage
             .all_tags()?
             .into_iter()
-            .map(|t| t.0)
+            .map(|t| t.name)
             .collect::<Vec<_>>(),
         col2.storage
             .all_tags()?
             .into_iter()
-            .map(|t| t.0)
+            .map(|t| t.name)
             .collect::<Vec<_>>()
     );
@@ -2,16 +2,55 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use crate::{
+    backend_proto::{Tag as TagProto, TagTreeNode},
     collection::Collection,
     err::{AnkiError, Result},
     notes::{NoteID, TransformNoteOutput},
-    text::to_re,
-    {text::normalize_to_nfc, types::Usn},
+    text::{normalize_to_nfc, to_re},
+    types::Usn,
 };
 
 use regex::{NoExpand, Regex, Replacer};
-use std::{borrow::Cow, collections::HashSet};
+use std::{borrow::Cow, collections::HashSet, iter::Peekable};
 use unicase::UniCase;
 
+#[derive(Debug, Clone, PartialEq)]
+pub struct Tag {
+    pub name: String,
+    pub usn: Usn,
+    pub collapsed: bool,
+}
+
+impl From<Tag> for TagProto {
+    fn from(t: Tag) -> Self {
+        TagProto {
+            name: t.name,
+            usn: t.usn.0,
+            collapsed: t.collapsed,
+        }
+    }
+}
+
+impl From<TagProto> for Tag {
+    fn from(t: TagProto) -> Self {
+        Tag {
+            name: t.name,
+            usn: Usn(t.usn),
+            collapsed: t.collapsed,
+        }
+    }
+}
+
+impl Tag {
+    pub fn new(name: String, usn: Usn) -> Self {
+        Tag {
+            name,
+            usn,
+            collapsed: false,
+        }
+    }
+}
+
 pub(crate) fn split_tags(tags: &str) -> impl Iterator<Item = &str> {
     tags.split(is_tag_separator).filter(|tag| !tag.is_empty())
 }
@@ -32,31 +71,134 @@ fn invalid_char_for_tag(c: char) -> bool {
     c.is_ascii_control() || is_tag_separator(c) || c == '"'
 }
 
+fn normalized_tag_name_component(comp: &str) -> Cow<str> {
+    let mut out = normalize_to_nfc(comp);
+    if out.contains(invalid_char_for_tag) {
+        out = out.replace(invalid_char_for_tag, "").into();
+    }
+    let trimmed = out.trim();
+    if trimmed.is_empty() {
+        "blank".to_string().into()
+    } else if trimmed.len() != out.len() {
+        trimmed.to_string().into()
+    } else {
+        out
+    }
+}
+
+fn normalize_tag_name(name: &str) -> String {
+    let mut out = String::with_capacity(name.len());
+    for comp in name.split("::") {
+        out.push_str(&normalized_tag_name_component(comp));
+        out.push_str("::");
+    }
+    out.trim_end_matches("::").into()
+}
+
+fn immediate_parent_name(tag_name: UniCase<&str>) -> Option<UniCase<&str>> {
+    tag_name.rsplitn(2, '\x1f').nth(1).map(UniCase::new)
+}
+
+/// For the given tag, check if immediate parent exists. If so, add
+/// tag and return.
+/// If the immediate parent is missing, check and add any missing parents.
+/// This should ensure that if an immediate parent is found, all ancestors
+/// are guaranteed to already exist.
+fn add_tag_and_missing_parents<'a, 'b>(
+    all: &'a mut HashSet<UniCase<&'b str>>,
+    missing: &'a mut Vec<UniCase<&'b str>>,
+    tag_name: UniCase<&'b str>,
+) {
+    if let Some(parent) = immediate_parent_name(tag_name) {
+        if !all.contains(&parent) {
+            missing.push(parent);
+            add_tag_and_missing_parents(all, missing, parent);
+        }
+    }
+    // finally, add provided tag
+    all.insert(tag_name);
+}
+
+/// Append any missing parents. Caller must sort afterwards.
+fn add_missing_parents(tags: &mut Vec<Tag>) {
+    let mut all_names: HashSet<UniCase<&str>> = HashSet::new();
+    let mut missing = vec![];
+    for tag in &*tags {
+        add_tag_and_missing_parents(&mut all_names, &mut missing, UniCase::new(&tag.name))
+    }
+    let mut missing: Vec<_> = missing
+        .into_iter()
+        .map(|n| Tag::new(n.to_string(), Usn(0)))
+        .collect();
+    tags.append(&mut missing);
+}
+
+fn tags_to_tree(mut tags: Vec<Tag>) -> TagTreeNode {
+    for tag in &mut tags {
+        tag.name = tag.name.replace("::", "\x1f");
+    }
+    add_missing_parents(&mut tags);
+    tags.sort_unstable_by(|a, b| UniCase::new(&a.name).cmp(&UniCase::new(&b.name)));
+    let mut top = TagTreeNode::default();
+    let mut it = tags.into_iter().peekable();
+    add_child_nodes(&mut it, &mut top);
+
+    top
+}
+
+fn add_child_nodes(tags: &mut Peekable<impl Iterator<Item = Tag>>, parent: &mut TagTreeNode) {
+    while let Some(tag) = tags.peek() {
+        let split_name: Vec<_> = tag.name.split('\x1f').collect();
+        match split_name.len() as u32 {
+            l if l <= parent.level => {
+                // next item is at a higher level
+                return;
+            }
+            l if l == parent.level + 1 => {
+                // next item is an immediate descendent of parent
+                parent.children.push(TagTreeNode {
+                    name: (*split_name.last().unwrap()).into(),
+                    children: vec![],
+                    level: parent.level + 1,
+                    collapsed: tag.collapsed,
+                });
+                tags.next();
+            }
+            _ => {
+                // next item is at a lower level
+                if let Some(last_child) = parent.children.last_mut() {
+                    add_child_nodes(tags, last_child)
+                } else {
+                    // immediate parent is missing
+                    tags.next();
+                }
+            }
+        }
+    }
+}
+
 impl Collection {
+    pub fn tag_tree(&mut self) -> Result<TagTreeNode> {
+        let tags = self.storage.all_tags()?;
+        let tree = tags_to_tree(tags);
+
+        Ok(tree)
+    }
+
     /// Given a list of tags, fix case, ordering and duplicates.
     /// Returns true if any new tags were added.
     pub(crate) fn canonify_tags(&self, tags: Vec<String>, usn: Usn) -> Result<(Vec<String>, bool)> {
         let mut seen = HashSet::new();
         let mut added = false;
 
-        let mut tags: Vec<_> = tags
-            .iter()
-            .flat_map(|t| split_tags(t))
-            .map(|s| normalize_to_nfc(&s))
-            .collect();
-
-        for tag in &mut tags {
-            if tag.contains(invalid_char_for_tag) {
-                *tag = tag.replace(invalid_char_for_tag, "").into();
-            }
-            if tag.trim().is_empty() {
-                continue;
-            }
-            let tag = self.register_tag(tag, usn)?;
-            if matches!(tag, Cow::Borrowed(_)) {
-                added = true;
-            }
-            seen.insert(UniCase::new(tag));
+        let tags: Vec<_> = tags.iter().flat_map(|t| split_tags(t)).collect();
+        for tag in tags {
+            let t = self.register_tag(Tag::new(tag.to_string(), usn))?;
+            if t.0.name.is_empty() {
+                continue;
+            }
+            added |= t.1;
+            seen.insert(UniCase::new(t.0.name));
         }
 
         // exit early if no non-empty tags
@@ -67,35 +209,52 @@ impl Collection {
         // return the sorted, canonified tags
         let mut tags = seen.into_iter().collect::<Vec<_>>();
         tags.sort_unstable();
-        let tags: Vec<_> = tags
-            .into_iter()
-            .map(|s| s.into_inner().to_string())
-            .collect();
+        let tags: Vec<_> = tags.into_iter().map(|s| s.into_inner()).collect();
 
         Ok((tags, added))
     }
 
-    pub(crate) fn register_tag<'a>(&self, tag: &'a str, usn: Usn) -> Result<Cow<'a, str>> {
-        if let Some(preferred) = self.storage.preferred_tag_case(tag)? {
-            Ok(preferred.into())
+    /// Register tag if it doesn't exist.
+    /// Returns a tuple of the tag with its name normalized and a boolean indicating if it was added.
+    pub(crate) fn register_tag(&self, tag: Tag) -> Result<(Tag, bool)> {
+        let normalized_name = normalize_tag_name(&tag.name);
+        let mut t = Tag {
+            name: normalized_name.clone(),
+            ..tag
+        };
+        if normalized_name.is_empty() {
+            return Ok((t, false));
+        }
+        if let Some(preferred) = self.storage.preferred_tag_case(&normalized_name)? {
+            t.name = preferred;
+            Ok((t, false))
         } else {
-            self.storage.register_tag(tag, usn)?;
-            Ok(tag.into())
+            self.storage.register_tag(&t)?;
+            Ok((t, true))
         }
     }
 
-    pub(crate) fn register_tags(&self, tags: &str, usn: Usn, clear_first: bool) -> Result<bool> {
-        let mut changed = false;
-        if clear_first {
-            self.storage.clear_tags()?;
-        }
-        for tag in split_tags(tags) {
-            let tag = self.register_tag(tag, usn)?;
-            if matches!(tag, Cow::Borrowed(_)) {
-                changed = true;
-            }
-        }
-        Ok(changed)
+    pub fn clear_unused_tags(&self) -> Result<()> {
+        let collapsed: HashSet<_> = self.storage.collapsed_tags()?.into_iter().collect();
+        self.storage.clear_tags()?;
+        let usn = self.usn()?;
+        for name in self.storage.all_tags_in_notes()? {
+            self.register_tag(Tag {
+                collapsed: collapsed.contains(&name),
+                name,
+                usn,
+            })?;
+        }
+
+        Ok(())
+    }
+
+    pub(crate) fn set_tag_collapsed(&self, name: &str, collapsed: bool) -> Result<()> {
+        if self.storage.get_tag(name)?.is_none() {
+            // tag is missing, register it
+            self.register_tag(Tag::new(name.to_string(), self.usn()?))?;
+        }
+        self.storage.set_tag_collapsed(name, collapsed)
     }
 
     fn replace_tags_for_notes_inner<R: Replacer>(
@@ -135,11 +294,10 @@ impl Collection {
         let tags = split_tags(tags)
             .map(|tag| {
                 let tag = if regex { tag.into() } else { to_re(tag) };
-                Regex::new(&format!("(?i)^{}$", tag))
+                Regex::new(&format!("(?i)^{}(::.*)?", tag))
                     .map_err(|_| AnkiError::invalid_input("invalid regex"))
             })
             .collect::<Result<Vec<Regex>>>()?;
 
         if !regex {
             self.replace_tags_for_notes_inner(nids, &tags, NoExpand(repl))
         } else {
@@ -263,6 +421,135 @@ mod test {
         let note = col.storage.get_note(note.id)?.unwrap();
         assert_eq!(&note.tags, &["cee"]);
 
+        let mut note = col.storage.get_note(note.id)?.unwrap();
+        note.tags = vec![
+            "foo::bar".into(),
+            "foo::bar::foo".into(),
+            "bar::foo".into(),
+            "bar::foo::bar".into(),
+        ];
+        col.update_note(&mut note)?;
+        col.replace_tags_for_notes(&[note.id], "bar::foo", "foo::bar", false)?;
+        let note = col.storage.get_note(note.id)?.unwrap();
+        assert_eq!(&note.tags, &["foo::bar", "foo::bar::bar", "foo::bar::foo",]);
+
+        // tag children are also cleared when clearing their parent
+        col.storage.clear_tags()?;
+        for name in vec!["a", "a::b", "A::b::c"] {
+            col.register_tag(Tag::new(name.to_string(), Usn(0)))?;
+        }
+        col.storage.clear_tag("a")?;
+        assert_eq!(col.storage.all_tags()?, vec![]);
+
+        Ok(())
+    }
+
+    fn node(name: &str, level: u32, children: Vec<TagTreeNode>) -> TagTreeNode {
+        TagTreeNode {
+            name: name.into(),
+            level,
+            children,
+            ..Default::default()
+        }
+    }
+
+    fn leaf(name: &str, level: u32) -> TagTreeNode {
+        node(name, level, vec![])
+    }
+
+    #[test]
+    fn tree() -> Result<()> {
+        let mut col = open_test_collection();
+        let nt = col.get_notetype_by_name("Basic")?.unwrap();
+        let mut note = nt.new_note();
+        note.tags.push("foo::bar::a".into());
+        note.tags.push("foo::bar::b".into());
+        col.add_note(&mut note, DeckID(1))?;
+
+        // missing parents are added
+        assert_eq!(
+            col.tag_tree()?,
+            node(
+                "",
+                0,
+                vec![node(
+                    "foo",
+                    1,
+                    vec![node("bar", 2, vec![leaf("a", 3), leaf("b", 3)])]
+                )]
+            )
+        );
+
+        // differing case should result in only one parent case being added -
+        // the first one
+        col.storage.clear_tags()?;
+        *(&mut note.tags[0]) = "foo::BAR::a".into();
+        *(&mut note.tags[1]) = "FOO::bar::b".into();
+        col.update_note(&mut note)?;
+        assert_eq!(
+            col.tag_tree()?,
+            node(
+                "",
+                0,
+                vec![node(
+                    "foo",
+                    1,
+                    vec![node("BAR", 2, vec![leaf("a", 3), leaf("b", 3)])]
+                )]
+            )
+        );
+
+        // things should work even if the immediate parent is not missing
+        col.storage.clear_tags()?;
+        *(&mut note.tags[0]) = "foo::bar::baz".into();
+        *(&mut note.tags[1]) = "foo::bar::baz::quux".into();
+        col.update_note(&mut note)?;
+        assert_eq!(
+            col.tag_tree()?,
+            node(
+                "",
+                0,
+                vec![node(
+                    "foo",
+                    1,
+                    vec![node("bar", 2, vec![node("baz", 3, vec![leaf("quux", 4)])])]
+                )]
+            )
+        );
+
+        // numbers have a smaller ascii number than ':', so a naive sort on
+        // '::' would result in one::two being nested under one1.
+        col.storage.clear_tags()?;
+        *(&mut note.tags[0]) = "one".into();
+        *(&mut note.tags[1]) = "one1".into();
+        note.tags.push("one::two".into());
+        col.update_note(&mut note)?;
+        assert_eq!(
+            col.tag_tree()?,
+            node(
+                "",
+                0,
+                vec![node("one", 1, vec![leaf("two", 2)]), leaf("one1", 1)]
+            )
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn clearing() -> Result<()> {
+        let mut col = open_test_collection();
+        let nt = col.get_notetype_by_name("Basic")?.unwrap();
+        let mut note = nt.new_note();
+        note.tags.push("one".into());
+        note.tags.push("two".into());
+        col.add_note(&mut note, DeckID(1))?;
+
+        col.set_tag_collapsed("two", true)?;
+        col.clear_unused_tags()?;
+        assert_eq!(col.storage.get_tag("one")?.unwrap().collapsed, false);
+        assert_eq!(col.storage.get_tag("two")?.unwrap().collapsed, true);
+
         Ok(())
     }
 }
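The core of the new sidebar support is tags_to_tree()/add_child_nodes() above: tag names are rewritten with a single-character separator, missing parent names are appended, the list is sorted case-insensitively, and nodes are then nested by component count. A rough Python illustration of the same idea (not the shipped algorithm; it ignores the duplicate-case handling and usn bookkeeping):

# Not part of the diff: rough sketch of the tag-tree construction idea.
def build_tree(names):
    sep = "\x1f"
    flat = {n.replace("::", sep) for n in names}
    for name in list(flat):
        parts = name.split(sep)
        for i in range(1, len(parts)):
            flat.add(sep.join(parts[:i]))  # add any missing parent names
    root = {"name": "", "children": {}}
    for name in sorted(flat, key=str.lower):
        node = root
        for comp in name.split(sep):
            node = node["children"].setdefault(comp, {"name": comp, "children": {}})
    return root

tree = build_tree(["foo::bar::a", "foo::bar::b"])
print(sorted(tree["children"]))                     # ['foo']
print(sorted(tree["children"]["foo"]["children"]))  # ['bar']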
@@ -323,14 +323,6 @@ pub(crate) fn to_text(txt: &str) -> Cow<str> {
     RE.replace_all(&txt, "$1")
 }
 
-/// Escape characters special to SQL: \%_
-pub(crate) fn escape_sql(txt: &str) -> Cow<str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"[\\%_]").unwrap();
-    }
-    RE.replace_all(&txt, r"\$0")
-}
-
 /// Escape Anki wildcards and the backslash for escaping them: \*_
 pub(crate) fn escape_anki_wildcards(txt: &str) -> Cow<str> {
     lazy_static! {
@@ -407,7 +399,6 @@ mod test {
         assert_eq!(&to_custom_re("f_o*", r"\d"), r"f\do\d*");
         assert_eq!(&to_sql("%f_o*"), r"\%f_o%");
         assert_eq!(&to_text(r"\*\_*_"), "*_*_");
-        assert_eq!(&escape_sql(r"1\2%3_"), r"1\\2\%3\_");
         assert!(is_glob(r"\\\\_"));
         assert!(!is_glob(r"\\\_"));
         assert!(matches_glob("foo*bar123", r"foo\*bar*"));