add separate decks table, and start on moving deck handling to Rust
The Python tests are passing, but there are still a number of issues to work through, and the table/protobuf schema is not yet finalized.
Parent: 36531ea96d
Commit: f592672fa9
34 changed files with 1483 additions and 288 deletions
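The sketch below illustrates the direction of the change, using only calls that appear in the diff that follows: decks move out of the serialized JSON in the col table into their own table, and Python's DeckManager becomes a thin wrapper over the Rust backend. Treat it as an illustration rather than a finished API — per the message above, the schema and methods are still in flux.

    def deck_roundtrip(col):
        # Deck data is now fetched from and written through the Rust backend,
        # instead of being parsed from / dumped into the col table JSON.
        did = col.decks.id("Japanese::Vocab")        # reuse or create via the backend
        deck = col.decks.get_legacy(did)             # schema-11 style dict
        deck["desc"] = "vocab cards"
        col.decks.save(deck)                         # persists via add_or_update_deck_legacy
        return [e.name for e in col.decks.all_names_and_ids()]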
@@ -71,7 +71,7 @@ message BackendInput {
         int32 get_changed_notetypes = 56;
         AddOrUpdateNotetypeIn add_or_update_notetype = 57;
         Empty get_all_decks = 58;
-        bytes set_all_decks = 59;
+        // bytes set_all_decks = 59;
         Empty all_stock_notetypes = 60;
         int64 get_notetype_legacy = 61;
         Empty get_notetype_names = 62;
@@ -83,6 +83,12 @@ message BackendInput {
         Note update_note = 68;
         int64 get_note = 69;
         Empty get_empty_cards = 70;
+        int64 get_deck_legacy = 71;
+        string get_deck_id_by_name = 72;
+        Empty get_deck_names = 73;
+        AddOrUpdateDeckLegacyIn add_or_update_deck_legacy = 74;
+        bool new_deck_legacy = 75;
+        int64 remove_deck = 76;
     }
 }
@@ -134,7 +140,7 @@ message BackendOutput {
         bytes get_changed_notetypes = 56;
         int64 add_or_update_notetype = 57;
         bytes get_all_decks = 58;
-        Empty set_all_decks = 59;
+        // Empty set_all_decks = 59;
         bytes get_notetype_legacy = 61;
         NoteTypeNames get_notetype_names = 62;
         NoteTypeUseCounts get_notetype_names_and_counts = 63;
@@ -145,6 +151,12 @@ message BackendOutput {
         Empty update_note = 68;
         Note get_note = 69;
         EmptyCardsReport get_empty_cards = 70;
+        bytes get_deck_legacy = 71;
+        int64 get_deck_id_by_name = 72;
+        DeckNames get_deck_names = 73;
+        int64 add_or_update_deck_legacy = 74;
+        bytes new_deck_legacy = 75;
+        Empty remove_deck = 76;
 
         BackendError error = 2047;
     }
@@ -166,6 +178,8 @@ message BackendError {
         string json_error = 9;
         string proto_error = 10;
         Empty not_found_error = 11;
+        Empty exists = 12;
+        Empty deck_is_filtered = 13;
     }
 }
@@ -614,3 +628,80 @@ message NoteWithEmptyCards {
     repeated int64 card_ids = 2;
     bool will_delete_note = 3;
 }
+
+message Deck {
+    int64 id = 1;
+    string name = 2;
+    uint32 mtime_secs = 3;
+    int32 usn = 4;
+    DeckCommon common = 5;
+    oneof kind {
+        NormalDeck normal = 6;
+        FilteredDeck filtered = 7;
+    }
+}
+
+message DeckCommon {
+    bool collapsed = 1;
+    uint32 last_day_studied = 2;
+    int32 new_studied = 3;
+    int32 review_studied = 4;
+    int32 learning_studied = 5;
+    int32 secs_studied = 6;
+    bytes other = 16;
+}
+
+message DeckKind {
+    oneof kind {
+        NormalDeck normal = 1;
+        FilteredDeck filtered = 2;
+    }
+}
+
+message NormalDeck {
+    int64 config_id = 1;
+    uint32 extend_new = 2;
+    uint32 extend_review = 3;
+    string description = 4;
+}
+
+message FilteredDeck {
+    bool reschedule = 1;
+    repeated FilteredSearchTerm search_terms = 2;
+    // v1 scheduler only
+    repeated float delays = 3;
+    // v2 scheduler only
+    uint32 preview_delay = 4;
+}
+
+message FilteredSearchTerm {
+    string search = 1;
+    uint32 limit = 2;
+    FilteredSearchOrder order = 3;
+}
+
+enum FilteredSearchOrder {
+    FILTERED_SEARCH_ORDER_OLDEST_FIRST = 0;
+    FILTERED_SEARCH_ORDER_RANDOM = 1;
+    FILTERED_SEARCH_ORDER_INTERVALS_ASCENDING = 2;
+    FILTERED_SEARCH_ORDER_INTERVALS_DESCENDING = 3;
+    FILTERED_SEARCH_ORDER_LAPSES = 4;
+    FILTERED_SEARCH_ORDER_ADDED = 5;
+    FILTERED_SEARCH_ORDER_DUE = 6;
+    FILTERED_SEARCH_ORDER_REVERSE_ADDED = 7;
+    FILTERED_SEARCH_ORDER_DUE_PRIORITY = 8;
+}
+
+message DeckNames {
+    repeated DeckNameID entries = 1;
+}
+
+message DeckNameID {
+    int64 id = 1;
+    string name = 2;
+}
+
+message AddOrUpdateDeckLegacyIn {
+    bytes deck = 1;
+    bool preserve_usn_and_mtime = 2;
+}
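These oneof fields are driven by setting exactly one member per BackendInput request; the matching BackendOutput member (or a BackendError) comes back. A minimal Python sketch, mirroring the rsbackend.py wrappers added later in this commit (the message and field names are taken from the definitions above):

    import anki.backend_pb2 as pb

    def deck_requests(did: int, name: str):
        # each request sets one input member; the reply carries the matching output member
        return [
            pb.BackendInput(get_deck_legacy=did),        # -> bytes of schema-11 JSON
            pb.BackendInput(get_deck_id_by_name=name),   # -> int64, 0 when not found
            pb.BackendInput(get_deck_names=pb.Empty()),  # -> DeckNames
            pb.BackendInput(new_deck_legacy=False),      # -> bytes for a new normal deck
            pb.BackendInput(remove_deck=did),            # -> Empty
        ]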
@@ -158,14 +158,10 @@ class _Collection:
             self.dty,  # no longer used
             self._usn,
             self.ls,
-            decks,
         ) = self.db.first(
             """
-select crt, mod, scm, dty, usn, ls,
-decks from col"""
+select crt, mod, scm, dty, usn, ls from col"""
         )
-        self.decks.decks = self.backend.get_all_decks()
-        self.decks.changed = False
 
     def setMod(self) -> None:
         """Mark DB modified.
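With the decks column dropped from this query, deck data is no longer loaded eagerly when the collection opens; it is fetched from the backend on demand. A small sketch of the replacement access path (assuming the legacy JSON shape keyed by stringified deck id, which is what DeckManager.get_all_legacy() below expects):

    def deck_names(col):
        all_decks = col.backend.get_all_decks()   # legacy shape: {"1": {...}, ...}
        return [d["name"] for d in all_decks.values()]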
@@ -8,6 +8,7 @@ import unicodedata
 from typing import Any, Dict, List, Optional, Set, Tuple, Union
 
 import anki  # pylint: disable=unused-import
+import anki.backend_pb2 as pb
 from anki import hooks
 from anki.consts import *
 from anki.errors import DeckRenameError
@@ -25,11 +26,13 @@ defaultDeck = {
     "conf": 1,
     "usn": 0,
     "desc": "",
-    "dyn": DECK_STD,  # anki uses int/bool interchangably here
+    "dyn": DECK_STD,
     "collapsed": False,
     # added in beta11
     "extendNew": 10,
     "extendRev": 50,
+    # fixme: if we keep this, mod must be set or handled in serde
+    "mod": 0,
 }
 
 defaultDynamicDeck = {
@@ -49,119 +52,140 @@ defaultDynamicDeck = {
     "return": True,  # currently unused
     # v2 scheduler
     "previewDelay": 10,
+    "mod": 0,
 }
 
 
-class DeckManager:
-    decks: Dict[str, Any]
+class DecksDictProxy:
+    def __init__(self, col: anki.storage._Collection):
+        self._col = col.weakref()
+
+    def _warn(self):
+        print("add-on should use methods on col.decks, not col.decks.decks dict")
+
+    def __getitem__(self, item):
+        self._warn()
+        return self._col.decks.get(int(item))
+
+    def __setitem__(self, key, val):
+        self._warn()
+        self._col.decks.save(val)
+
+    def __len__(self):
+        self._warn()
+        return len(self._col.decks.all_names_and_ids())
+
+    def keys(self):
+        self._warn()
+        return [str(nt.id) for nt in self._col.decks.all_names_and_ids()]
+
+    def values(self):
+        self._warn()
+        return self._col.decks.all()
+
+    def items(self):
+        self._warn()
+        return [(str(nt["id"]), nt) for nt in self._col.decks.all()]
+
+    def __contains__(self, item):
+        self._warn()
+        self._col.decks.have(item)
+
+
+class DeckManager:
     # Registry save/load
     #############################################################
 
     def __init__(self, col: anki.storage._Collection) -> None:
         self.col = col.weakref()
-        self.decks = {}
+        self.decks = DecksDictProxy(col)
+        # do not access this directly!
+        # self._cache: Dict[int, ] = {}
+        # self.decks = {}
         self._dconf_cache: Optional[Dict[int, Dict[str, Any]]] = None
 
-    def save(self, g: Optional[Any] = None) -> None:
+    def save(self, g: Dict = None) -> None:
         "Can be called with either a deck or a deck configuration."
-        if g:
-            # deck conf?
-            if "maxTaken" in g:
-                self.update_config(g)
-                return
-            else:
-                g["mod"] = intTime()
-                g["usn"] = self.col.usn()
-        self.changed = True
+        if not g:
+            print("col.decks.save() should be passed the changed deck")
+            return
+
+        # deck conf?
+        if "maxTaken" in g:
+            self.update_config(g)
+            return
+        else:
+            # g["mod"] = intTime()
+            # g["usn"] = self.col.usn()
+            self.update(g)
 
-    def flush(self) -> None:
-        if self.changed:
-            self.col.backend.set_all_decks(self.decks)
-            self.changed = False
+    # legacy
+    def flush(self):
+        pass
 
     # Deck save/load
     #############################################################
 
+    # fixme: if we're stripping chars on add, then we need to do that on lookup as well
+    # and need to make sure \x1f conversion
+
     def id(
         self, name: str, create: bool = True, type: Optional[Dict[str, Any]] = None
     ) -> Optional[int]:
         "Add a deck with NAME. Reuse deck if already exists. Return id as int."
         if type is None:
             type = defaultDeck
-        name = name.replace('"', "")
-        name = unicodedata.normalize("NFC", name)
-        deck = self.byName(name)
-        if deck:
-            return int(deck["id"])
-        if not create:
-            return None
-        g = copy.deepcopy(type)
-        if "::" in name:
-            # not top level; ensure all parents exist
-            name = self._ensureParents(name)
-        g["name"] = name
-        while 1:
-            id = intTime(1000)
-            if str(id) not in self.decks:
-                break
-        g["id"] = id
-        self.decks[str(id)] = g
-        self.save(g)
-        self.maybeAddToActive()
-        hooks.deck_added(g)
-        return int(id)
-
-    def rem(self, did: int, cardsToo: bool = False, childrenToo: bool = True) -> None:
-        "Remove the deck. If cardsToo, delete any cards inside."
-        if str(did) == "1":
-            # we won't allow the default deck to be deleted, but if it's a
-            # child of an existing deck then it needs to be renamed
-            deck = self.get(did)
-            if "::" in deck["name"]:
-                base = self.basename(deck["name"])
-                suffix = ""
-                while True:
-                    # find an unused name
-                    name = base + suffix
-                    if not self.byName(name):
-                        deck["name"] = name
-                        self.save(deck)
-                        break
-                    suffix += "1"
-            return
-        # log the removal regardless of whether we have the deck or not
-        self.col._logRem([did], REM_DECK)
-        # do nothing else if doesn't exist
-        if not str(did) in self.decks:
-            return
-        deck = self.get(did)
-        if deck["dyn"]:
-            # deleting a cramming deck returns cards to their previous deck
-            # rather than deleting the cards
-            self.col.sched.emptyDyn(did)
-            if childrenToo:
-                for name, id in self.children(did):
-                    self.rem(id, cardsToo, childrenToo=False)
-        else:
-            # delete children first
-            if childrenToo:
-                # we don't want to delete children when syncing
-                for name, id in self.children(did):
-                    self.rem(id, cardsToo, childrenToo=False)
-            # delete cards too?
-            if cardsToo:
-                # don't use cids(), as we want cards in cram decks too
-                cids = self.col.db.list(
-                    "select id from cards where did=? or odid=?", did, did
-                )
-                self.col.remCards(cids)
-        # delete the deck and add a grave
-        del self.decks[str(did)]
+        id = self.id_for_name(name)
+        if id:
+            return id
+        elif not create:
+            return None
+
+        deck = self.new_deck_legacy(bool(type["dyn"]))
+        deck["name"] = name
+        self.update(deck)
+        # fixme
+        self.maybeAddToActive()
+        # fixme
+        hooks.deck_added(deck)
+        return deck["id"]
+
+    def rem(self, did: int, cardsToo: bool = True, childrenToo: bool = True) -> None:
+        "Remove the deck. If cardsToo, delete any cards inside."
+        assert cardsToo and childrenToo
+        self.col.backend.remove_deck(did)
+        # fixme: default deck special case
+        # if str(did) == "1":
+        #     # we won't allow the default deck to be deleted, but if it's a
+        #     # child of an existing deck then it needs to be renamed
+        #     deck = self.get(did)
+        #     if "::" in deck["name"]:
+        #         base = self.basename(deck["name"])
+        #         suffix = ""
+        #         while True:
+        #             # find an unused name
+        #             name = base + suffix
+        #             if not self.byName(name):
+        #                 deck["name"] = name
+        #                 self.save(deck)
+        #                 break
+        #             suffix += "1"
+        #     return
+
+        # fixme:
+        # # don't use cids(), as we want cards in cram decks too
+        # cids = self.col.db.list(
+        #     "select id from cards where did=? or odid=?", did, did
+        # )
+
+        # fixme
         # ensure we have an active deck
         if did in self.active():
-            self.select(int(list(self.decks.keys())[0]))
-        self.save()
+            self.select(self.all_names_and_ids()[0].id)
 
     def allNames(self, dyn: bool = True, force_default: bool = True) -> List:
         "An unsorted list of all deck names."
@@ -172,6 +196,29 @@ class DeckManager:
             x["name"] for x in self.all(force_default=force_default) if not x["dyn"]
         ]
 
+    def all_names_and_ids(self) -> List[pb.DeckNameID]:
+        return self.col.backend.get_deck_names_and_ids()
+
+    def id_for_name(self, name: str) -> Optional[int]:
+        return self.col.backend.get_deck_id_by_name(name)
+
+    def get_legacy(self, did: int) -> Optional[Dict]:
+        return self.col.backend.get_deck_legacy(did)
+
+    def have(self, id: int) -> bool:
+        return not self.get_legacy(int(id))
+
+    def get_all_legacy(self) -> List[Dict]:
+        return list(self.col.backend.get_all_decks().values())
+
+    def new_deck_legacy(self, filtered: bool) -> Dict:
+        try:
+            return self.col.backend.new_deck_legacy(filtered)
+        except anki.rsbackend.DeckIsFilteredError:
+            raise DeckRenameError("deck was filtered")
+        except anki.rsbackend.ExistsError:
+            raise DeckRenameError("deck already exists")
+
     def all(self, force_default: bool = True) -> List:
         """A list of all decks.
@@ -181,19 +228,20 @@ class DeckManager:
         * default deck contains a card
         * default deck has a child (assumed not to be the case if assume_no_child)
         """
-        decks = list(self.decks.values())
+        decks = self.get_all_legacy()
         if not force_default and not self.should_default_be_displayed(force_default):
             decks = [deck for deck in decks if deck["id"] != 1]
         return decks
 
     def allIds(self) -> List[str]:
-        return list(self.decks.keys())
+        return [str(x.id) for x in self.all_names_and_ids()]
 
     def collapse(self, did) -> None:
         deck = self.get(did)
         deck["collapsed"] = not deck["collapsed"]
         self.save(deck)
 
+    # fixme
     def collapseBrowser(self, did) -> None:
         deck = self.get(did)
         collapsed = deck.get("browserCollapsed", False)
@@ -201,51 +249,54 @@ class DeckManager:
         self.save(deck)
 
     def count(self) -> int:
-        return len(self.decks)
+        return len(self.all_names_and_ids())
 
-    def get(self, did: Union[int, str], default: bool = True) -> Any:
-        id = str(did)
-        if id in self.decks:
-            return self.decks[id]
+    def get(self, did: Union[int, str], default: bool = True) -> Optional[Dict]:
+        if not did:
+            if default:
+                return self.get_legacy(1)
+            else:
+                return None
+        id = int(did)
+        deck = self.get_legacy(id)
+        if deck:
+            return deck
         elif default:
-            return self.decks["1"]
+            return self.get_legacy(1)
+        else:
+            return None
 
-    def byName(self, name: str) -> Any:
+    def byName(self, name: str) -> Optional[Dict]:
         """Get deck with NAME, ignoring case."""
-        for m in list(self.decks.values()):
-            if self.equalName(m["name"], name):
-                return m
+        id = self.id_for_name(name)
+        if id:
+            return self.get_legacy(id)
+        return None
 
-    def update(self, g: Dict[str, Any]) -> None:
+    def update(self, g: Dict[str, Any], preserve_usn=False) -> None:
         "Add or update an existing deck. Used for syncing and merging."
-        self.decks[str(g["id"])] = g
+        try:
+            self.col.backend.add_or_update_deck_legacy(g, preserve_usn)
+        except anki.rsbackend.DeckIsFilteredError:
+            raise DeckRenameError("deck was filtered")
+        except anki.rsbackend.ExistsError:
+            raise DeckRenameError("deck already exists")
+
+        # self.decks[str(g["id"])] = g
         self.maybeAddToActive()
         # mark registry changed, but don't bump mod time
-        self.save()
+        # self.save()
 
     def rename(self, g: Dict[str, Any], newName: str) -> None:
         "Rename deck prefix to NAME if not exists. Updates children."
-        # make sure target node doesn't already exist
-        if self.byName(newName):
-            raise DeckRenameError(_("That deck already exists."))
-        # make sure we're not nesting under a filtered deck
-        for p in self.parentsByName(newName):
-            if p["dyn"]:
-                raise DeckRenameError(_("A filtered deck cannot have subdecks."))
-        # ensure we have parents
-        newName = self._ensureParents(newName)
-        # rename children
-        for grp in self.all():
-            if grp["name"].startswith(g["name"] + "::"):
-                grp["name"] = grp["name"].replace(g["name"] + "::", newName + "::", 1)
-                self.save(grp)
-        # adjust name
         g["name"] = newName
-        # ensure we have parents again, as we may have renamed parent->child
-        newName = self._ensureParents(newName)
-        self.save(g)
-        # renaming may have altered active did order
-        self.maybeAddToActive()
+        self.update(g)
+        return
+        # fixme: ensure rename of b in a::b::c generates new b
+        # fixme: renaming may have altered active did order
+        # self.maybeAddToActive()
 
     def renameForDragAndDrop(self, draggedDeckDid: int, ontoDeckDid: Any) -> None:
         draggedDeck = self.get(draggedDeckDid)
@@ -389,6 +440,7 @@ class DeckManager:
             grp["conf"] = id
             self.save(grp)
 
+    # fixme: expensive
     def didsForConf(self, conf) -> List:
         dids = []
         for deck in list(self.decks.values()):
@@ -459,16 +511,18 @@ class DeckManager:
     def for_card_ids(self, cids: List[int]) -> List[int]:
         return self.col.db.list(f"select did from cards where id in {ids2str(cids)}")
 
+    # fixme
     def _recoverOrphans(self) -> None:
-        dids = list(self.decks.keys())
-        mod = self.col.db.mod
-        self.col.db.execute(
-            "update cards set did = 1 where did not in " + ids2str(dids)
-        )
-        self.col.db.mod = mod
+        pass
+        # dids = list(self.decks.keys())
+        # mod = self.col.db.mod
+        # self.col.db.execute(
+        #     "update cards set did = 1 where did not in " + ids2str(dids)
+        # )
+        # self.col.db.mod = mod
 
     def _checkDeckTree(self) -> None:
-        decks = self.col.decks.all()
+        decks = self.all()
         decks.sort(key=self.key)
         names: Set[str] = set()
@@ -522,18 +576,20 @@ class DeckManager:
             return True
         if self.col.db.scalar("select 1 from cards where did = 1 limit 1"):
             return True
-        if len(self.decks) == 1:
-            return True
-        # looking for children
-        if assume_no_child:
-            return False
-        if default_deck is None:
-            default_deck = self.get(1)
-        defaultName = default_deck["name"]
-        for name in self.allNames():
-            if name.startswith(f"{defaultName}::"):
-                return True
+        # fixme
         return False
+        # if len(self.all_names_and_ids()) == 1:
+        #     return True
+        # # looking for children
+        # if assume_no_child:
+        #     return False
+        # if default_deck is None:
+        #     default_deck = self.get(1)
+        # defaultName = default_deck["name"]
+        # for name in self.allNames():
+        #     if name.startswith(f"{defaultName}::"):
+        #         return True
+        # return False
 
     # Deck selection
     #############################################################
@@ -632,7 +688,7 @@ class DeckManager:
         return parents
 
     def nameMap(self) -> dict:
-        return dict((d["name"], d) for d in self.decks.values())
+        return dict((d["name"], d) for d in self.all())
 
     # Sync handling
     ##########################################################################
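A short usage sketch of the reworked DeckManager: new code is meant to use the backend-backed helpers, while old add-on style access through col.decks.decks keeps working via DecksDictProxy (with a printed warning). Names follow the methods added above.

    def deck_overview(col):
        entries = col.decks.all_names_and_ids()    # list of pb.DeckNameID
        did = col.decks.id_for_name("Default")     # None when absent
        deck = col.decks.get_legacy(did) if did else None

        # legacy add-on style: proxied and warns, but still answers
        ids_as_strings = list(col.decks.decks.keys())
        return entries, deck, ids_as_strings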
@@ -191,9 +191,6 @@ class ModelManager:
     def all_use_counts(self) -> List[pb.NoteTypeNameIDUseCount]:
         return self.col.backend.get_notetype_use_counts()
 
-    def id_for_name(self, name: str) -> Optional[int]:
-        return self.col.backend.get_notetype_id_by_name(name)
-
     # legacy
 
     def allNames(self) -> List[str]:
@@ -227,6 +224,9 @@ class ModelManager:
     # Retrieving and creating models
     #############################################################
 
+    def id_for_name(self, name: str) -> Optional[int]:
+        return self.col.backend.get_notetype_id_by_name(name)
+
     def get(self, id: int) -> Optional[NoteType]:
         "Get model with ID, or None."
         # deal with various legacy input types
@@ -102,6 +102,14 @@ class NotFoundError(Exception):
     pass
 
 
+class ExistsError(Exception):
+    pass
+
+
+class DeckIsFilteredError(Exception):
+    pass
+
+
 def proto_exception_to_native(err: pb.BackendError) -> Exception:
     val = err.WhichOneof("value")
     if val == "interrupted":
@@ -122,6 +130,10 @@ def proto_exception_to_native(err: pb.BackendError) -> Exception:
         return StringError(err.localized)
     elif val == "not_found_error":
         return NotFoundError()
+    elif val == "exists":
+        return ExistsError()
+    elif val == "deck_is_filtered":
+        return DeckIsFilteredError()
     else:
         assert_impossible_literal(val)
@@ -597,9 +609,6 @@ class RustBackend:
         ).get_all_decks
         return orjson.loads(jstr)
 
-    def set_all_decks(self, nts: Dict[str, Dict[str, Any]]):
-        self._run_command(pb.BackendInput(set_all_decks=orjson.dumps(nts)))
-
     def all_stock_notetypes(self) -> List[NoteType]:
         return list(
             self._run_command(
@@ -674,6 +683,52 @@ class RustBackend:
             pb.BackendInput(get_empty_cards=pb.Empty()), release_gil=True
         ).get_empty_cards
 
+    def get_deck_legacy(self, did: int) -> Optional[Dict]:
+        try:
+            bytes = self._run_command(
+                pb.BackendInput(get_deck_legacy=did)
+            ).get_deck_legacy
+            return orjson.loads(bytes)
+        except NotFoundError:
+            return None
+
+    def get_deck_names_and_ids(self) -> List[pb.DeckNameID]:
+        return list(
+            self._run_command(
+                pb.BackendInput(get_deck_names=pb.Empty())
+            ).get_deck_names.entries
+        )
+
+    def add_or_update_deck_legacy(
+        self, deck: Dict[str, Any], preserve_usn: bool
+    ) -> None:
+        deck_json = orjson.dumps(deck)
+        id = self._run_command(
+            pb.BackendInput(
+                add_or_update_deck_legacy=pb.AddOrUpdateDeckLegacyIn(
+                    deck=deck_json, preserve_usn_and_mtime=preserve_usn
+                )
+            )
+        ).add_or_update_deck_legacy
+        deck["id"] = id
+
+    def new_deck_legacy(self, filtered: bool) -> Dict[str, Any]:
+        jstr = self._run_command(
+            pb.BackendInput(new_deck_legacy=filtered)
+        ).new_deck_legacy
+        return orjson.loads(jstr)
+
+    def get_deck_id_by_name(self, name: str) -> Optional[int]:
+        return (
+            self._run_command(
+                pb.BackendInput(get_deck_id_by_name=name)
+            ).get_deck_id_by_name
+            or None
+        )
+
+    def remove_deck(self, did: int) -> None:
+        self._run_command(pb.BackendInput(remove_deck=did))
+
+
 def translate_string_in(
     key: TR, **kwargs: Union[str, int, float]
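The wrappers above translate the new backend errors into ExistsError and DeckIsFilteredError, which DeckManager.update() in turn converts to DeckRenameError. A sketch of driving the wrappers directly (preserve_usn=False is the normal local-change path; the wrapper fills deck["id"] in from the reply):

    import anki.rsbackend as rsbackend
    from anki.errors import DeckRenameError

    def add_deck(backend: rsbackend.RustBackend, name: str) -> int:
        deck = backend.new_deck_legacy(False)      # fresh schema-11 deck dict
        deck["name"] = name
        try:
            backend.add_or_update_deck_legacy(deck, preserve_usn=False)
        except rsbackend.ExistsError:
            raise DeckRenameError("deck already exists")  # same mapping DeckManager.update() uses
        except rsbackend.DeckIsFilteredError:
            raise DeckRenameError("deck was filtered")
        return deck["id"]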
@@ -1,8 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-import copy
-import json
 import os
 import weakref
 from dataclasses import dataclass
@@ -10,10 +8,8 @@ from typing import Optional
 
 from anki.collection import _Collection
 from anki.dbproxy import DBProxy
-from anki.lang import _
 from anki.media import media_paths_from_col_path
 from anki.rsbackend import RustBackend
-from anki.utils import intTime
 
 
 @dataclass
@@ -43,37 +39,7 @@ def Collection(
     backend.open_collection(path, media_dir, media_db, log_path)
     db = DBProxy(weakref.proxy(backend), path)
 
-    # initial setup required?
-    create = db.scalar("select decks = '{}' from col")
-    if create:
-        initial_db_setup(db)
-
     # add db to col and do any remaining upgrades
-    col = _Collection(db, backend=backend, server=server, log=should_log)
-    if create:
-        col.save()
-    else:
-        db.begin()
+    col = _Collection(db, backend=backend, server=server)
+    db.begin()
     return col
-
-
-# Creating a new collection
-######################################################################
-
-
-def initial_db_setup(db: DBProxy) -> None:
-    import anki.decks
-
-    db.begin()
-
-    g = copy.deepcopy(anki.decks.defaultDeck)
-    g["id"] = 1
-    g["name"] = _("Default")
-    g["conf"] = 1
-    g["mod"] = intTime()
-
-    db.execute(
-        """
-update col set decks = ?""",
-        json.dumps({"1": g}),
-    )
@@ -7,13 +7,13 @@ from tests.shared import assertException, getEmptyCol
 def test_basic():
     deck = getEmptyCol()
     # we start with a standard deck
-    assert len(deck.decks.decks) == 1
+    assert len(deck.decks.all_names_and_ids()) == 1
     # it should have an id of 1
     assert deck.decks.name(1)
     # create a new deck
     parentId = deck.decks.id("new deck")
     assert parentId
-    assert len(deck.decks.decks) == 2
+    assert len(deck.decks.all_names_and_ids()) == 2
     # should get the same id
     assert deck.decks.id("new deck") == parentId
     # we start with the default deck selected
@@ -54,22 +54,11 @@ def test_remove():
     deck.addNote(f)
     c = f.cards()[0]
     assert c.did == g1
-    # by default deleting the deck leaves the cards with an invalid did
     assert deck.cardCount() == 1
     deck.decks.rem(g1)
-    assert deck.cardCount() == 1
-    c.load()
-    assert c.did == g1
-    # but if we try to get it, we get the default
-    assert deck.decks.name(c.did) == "[no deck]"
-    # let's create another deck and explicitly set the card to it
-    g2 = deck.decks.id("g2")
-    c.did = g2
-    c.flush()
-    # this time we'll delete the card/note too
-    deck.decks.rem(g2, cardsToo=True)
     assert deck.cardCount() == 0
-    assert deck.noteCount() == 0
+    # if we try to get it, we get the default
+    assert deck.decks.name(c.did) == "[no deck]"
 
 
 def test_rename():
@@ -630,6 +630,7 @@ def test_cram():
     assert d.sched.nextIvl(c, 2) == 138 * 60 * 60 * 24
     cram = d.decks.get(did)
     cram["delays"] = [1, 10]
+    d.decks.save(cram)
     assert d.sched.answerButtons(c) == 3
     assert d.sched.nextIvl(c, 1) == 60
     assert d.sched.nextIvl(c, 2) == 600
@@ -739,6 +740,7 @@ def test_cram_resched():
     did = d.decks.newDyn("Cram")
     cram = d.decks.get(did)
     cram["resched"] = False
+    d.decks.save(cram)
     d.sched.rebuildDyn(did)
     d.reset()
     # graduate should return it to new
@@ -1017,7 +1019,7 @@ def test_deckDue():
     foobaz = f.model()["did"] = d.decks.id("foo::baz")
     d.addNote(f)
     d.reset()
-    assert len(d.decks.decks) == 5
+    assert len(d.decks.all_names_and_ids()) == 5
     cnts = d.sched.deckDueList()
     assert cnts[0] == ["Default", 1, 0, 0, 1]
     assert cnts[1] == ["Default::1", default1, 1, 0, 0]
@@ -460,6 +460,8 @@ def test_review_limits():
     assert tree[1][5][0][2] == 4  # child
 
     # switch limits
+    parent = d.decks.get(parent["id"])
+    child = d.decks.get(child["id"])
     d.decks.setConf(parent, cconf["id"])
     d.decks.setConf(child, pconf["id"])
     d.decks.select(parent["id"])
@@ -825,6 +827,7 @@ def test_preview():
     did = d.decks.newDyn("Cram")
     cram = d.decks.get(did)
     cram["resched"] = False
+    d.decks.save(cram)
     d.sched.rebuildDyn(did)
     d.reset()
     # grab the first card
@@ -1032,7 +1035,7 @@ def test_deckDue():
     foobaz = f.model()["did"] = d.decks.id("foo::baz")
     d.addNote(f)
     d.reset()
-    assert len(d.decks.decks) == 5
+    assert len(d.decks.all_names_and_ids()) == 5
     cnts = d.sched.deckDueList()
     assert cnts[0] == ["Default", 1, 1, 0, 1]
     assert cnts[1] == ["Default::1", default1, 1, 0, 0]
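The added d.decks.save(cram) calls are a behavioural consequence of the new storage: get() now returns a copy deserialized from the backend, so in-place edits to the dict are not visible to the scheduler until they are written back:

    def disable_cram_rescheduling(col, did):
        cram = col.decks.get(did)    # detached copy from the Rust backend
        cram["resched"] = False      # editing the copy alone persists nothing
        col.decks.save(cram)         # write it back (add_or_update_deck_legacy)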
@@ -11,7 +11,7 @@ use crate::{
     collection::{open_collection, Collection},
     config::SortKind,
     deckconf::{DeckConf, DeckConfID},
-    decks::{Deck, DeckID},
+    decks::{Deck, DeckID, DeckSchema11},
     err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind},
     i18n::{tr_args, I18n, TR},
     latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex},
@@ -78,6 +78,8 @@ fn anki_error_to_proto_error(err: AnkiError, i18n: &I18n) -> pb::BackendError {
         AnkiError::JSONError { info } => V::JsonError(info),
         AnkiError::ProtoError { info } => V::ProtoError(info),
         AnkiError::NotFound => V::NotFoundError(Empty {}),
+        AnkiError::Existing => V::Exists(Empty {}),
+        AnkiError::DeckIsFiltered => V::DeckIsFiltered(Empty {}),
     };
 
     pb::BackendError {
@@ -306,10 +308,6 @@ impl Backend {
                 OValue::GetChangedNotetypes(self.get_changed_notetypes()?)
             }
             Value::GetAllDecks(_) => OValue::GetAllDecks(self.get_all_decks()?),
-            Value::SetAllDecks(bytes) => {
-                self.set_all_decks(&bytes)?;
-                OValue::SetAllDecks(pb::Empty {})
-            }
             Value::AllStockNotetypes(_) => OValue::AllStockNotetypes(pb::AllStockNotetypesOut {
                 notetypes: all_stock_notetypes(&self.i18n)
                     .into_iter()
@@ -342,6 +340,22 @@ impl Backend {
             }
             Value::GetNote(nid) => OValue::GetNote(self.get_note(nid)?),
             Value::GetEmptyCards(_) => OValue::GetEmptyCards(self.get_empty_cards()?),
+            Value::GetDeckLegacy(did) => OValue::GetDeckLegacy(self.get_deck_legacy(did)?),
+            Value::GetDeckIdByName(name) => {
+                OValue::GetDeckIdByName(self.get_deck_id_by_name(&name)?)
+            }
+            Value::GetDeckNames(_) => OValue::GetDeckNames(self.get_deck_names()?),
+            Value::AddOrUpdateDeckLegacy(input) => {
+                OValue::AddOrUpdateDeckLegacy(self.add_or_update_deck_legacy(input)?)
+            }
+            Value::NewDeckLegacy(filtered) => {
+                OValue::NewDeckLegacy(self.new_deck_legacy(filtered)?)
+            }
+
+            Value::RemoveDeck(did) => OValue::RemoveDeck({
+                self.remove_deck(did)?;
+                pb::Empty {}
+            }),
         })
     }
@@ -846,14 +860,9 @@ impl Backend {
         // })
     }
 
-    fn set_all_decks(&self, json: &[u8]) -> Result<()> {
-        let val: HashMap<DeckID, Deck> = serde_json::from_slice(json)?;
-        self.with_col(|col| col.transact(None, |col| col.storage.set_all_decks(val)))
-    }
-
     fn get_all_decks(&self) -> Result<Vec<u8>> {
         self.with_col(|col| {
-            let decks = col.storage.get_all_decks()?;
+            let decks = col.storage.get_all_decks_as_schema11()?;
             serde_json::to_vec(&decks).map_err(Into::into)
         })
     }
@@ -974,6 +983,59 @@ impl Backend {
             })
         })
     }
+
+    fn get_deck_legacy(&self, did: i64) -> Result<Vec<u8>> {
+        self.with_col(|col| {
+            let deck: DeckSchema11 = col
+                .storage
+                .get_deck(DeckID(did))?
+                .ok_or(AnkiError::NotFound)?
+                .into();
+            serde_json::to_vec(&deck).map_err(Into::into)
+        })
+    }
+
+    fn get_deck_id_by_name(&self, human_name: &str) -> Result<i64> {
+        self.with_col(|col| {
+            col.get_deck_id(human_name)
+                .map(|d| d.map(|d| d.0).unwrap_or_default())
+        })
+    }
+
+    fn get_deck_names(&self) -> Result<pb::DeckNames> {
+        self.with_col(|col| {
+            let names = col.storage.get_all_deck_names()?;
+            Ok(pb::DeckNames {
+                entries: names
+                    .into_iter()
+                    .map(|(id, name)| pb::DeckNameId { id: id.0, name })
+                    .collect(),
+            })
+        })
+    }
+
+    fn add_or_update_deck_legacy(&self, input: pb::AddOrUpdateDeckLegacyIn) -> Result<i64> {
+        self.with_col(|col| {
+            let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
+            let mut deck: Deck = schema11.into();
+            col.add_or_update_deck(&mut deck, input.preserve_usn_and_mtime)?;
+            Ok(deck.id.0)
+        })
+    }
+
+    fn new_deck_legacy(&self, filtered: bool) -> Result<Vec<u8>> {
+        let deck = if filtered {
+            Deck::new_filtered()
+        } else {
+            Deck::new_normal()
+        };
+        let schema11: DeckSchema11 = deck.into();
+        serde_json::to_vec(&schema11).map_err(Into::into)
+    }
+
+    fn remove_deck(&self, did: i64) -> Result<()> {
+        self.with_col(|col| col.remove_deck_and_child_decks(DeckID(did)))
+    }
 }
 
 fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue {
@@ -5,9 +5,13 @@ use crate::decks::DeckID;
 use crate::define_newtype;
 use crate::err::{AnkiError, Result};
 use crate::notes::NoteID;
-use crate::{collection::Collection, timestamp::TimestampSecs, types::Usn, undo::Undoable};
+use crate::{
+    collection::Collection, config::SchedulerVersion, timestamp::TimestampSecs, types::Usn,
+    undo::Undoable,
+};
 use num_enum::TryFromPrimitive;
 use serde_repr::{Deserialize_repr, Serialize_repr};
+use std::collections::HashSet;
 
 define_newtype!(CardID, i64);
 
@@ -86,6 +90,60 @@ impl Default for Card {
     }
 }
 
+impl Card {
+    pub(crate) fn return_home(&mut self, sched: SchedulerVersion) {
+        if self.odid.0 == 0 {
+            // this should not happen
+            return;
+        }
+
+        // fixme: avoid bumping mtime?
+        self.did = self.odid;
+        self.odid.0 = 0;
+        if self.odue > 0 {
+            self.due = self.odue;
+        }
+        self.odue = 0;
+
+        self.queue = match sched {
+            SchedulerVersion::V1 => {
+                match self.ctype {
+                    CardType::New => CardQueue::New,
+                    CardType::Learn => CardQueue::New,
+                    CardType::Review => CardQueue::Review,
+                    // not applicable in v1, should not happen
+                    CardType::Relearn => {
+                        println!("did not expect relearn type in v1 for card {}", self.id);
+                        CardQueue::New
+                    }
+                }
+            }
+            SchedulerVersion::V2 => {
+                if (self.queue as i8) >= 0 {
+                    match self.ctype {
+                        CardType::Learn | CardType::Relearn => {
+                            if self.due > 1_000_000_000 {
+                                // unix timestamp
+                                CardQueue::Learn
+                            } else {
+                                // day number
+                                CardQueue::DayLearn
+                            }
+                        }
+                        CardType::New => CardQueue::New,
+                        CardType::Review => CardQueue::Review,
+                    }
+                } else {
+                    self.queue
+                }
+            }
+        };
+
+        if sched == SchedulerVersion::V1 && self.ctype == CardType::Learn {
+            self.ctype = CardType::New;
+        }
+    }
+}
 #[derive(Debug)]
 pub(crate) struct UpdateCardUndo(Card);
@@ -145,6 +203,28 @@ impl Collection {
         card.usn = self.usn()?;
         self.storage.add_card(card)
     }
+
+    /// Remove cards and any resulting orphaned notes.
+    /// Expects a transaction.
+    pub(crate) fn remove_cards_inner(&mut self, cids: &[CardID]) -> Result<()> {
+        let usn = self.usn()?;
+        let mut nids = HashSet::new();
+        for cid in cids {
+            if let Some(card) = self.storage.get_card(*cid)? {
+                // fixme: undo
+                nids.insert(card.nid);
+                self.storage.remove_card(*cid)?;
+                self.storage.add_card_grave(*cid, usn)?;
+            }
+        }
+        for nid in nids {
+            if self.storage.note_is_orphaned(nid)? {
+                self.remove_note_only(nid, usn)?;
+            }
+        }
+
+        Ok(())
+    }
 }
 
 #[cfg(test)]
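Card::return_home above is what will let remove_deck send cards from a filtered deck back to their home deck rather than deleting them; the queue it picks depends on the scheduler version. The decision table, rendered as a rough Python sketch for illustration only (constant values follow Anki's usual card type/queue numbering; the Rust code is the source of truth):

    QUEUE_NEW, QUEUE_LEARN, QUEUE_REVIEW, QUEUE_DAY_LEARN = 0, 1, 2, 3
    TYPE_NEW, TYPE_LEARN, TYPE_REVIEW, TYPE_RELEARN = 0, 1, 2, 3

    def queue_after_return_home(sched_ver: int, ctype: int, queue: int, due: int) -> int:
        if sched_ver == 1:
            # relearning does not exist in v1, so it falls back to new
            return {TYPE_NEW: QUEUE_NEW, TYPE_LEARN: QUEUE_NEW,
                    TYPE_REVIEW: QUEUE_REVIEW, TYPE_RELEARN: QUEUE_NEW}[ctype]
        if queue < 0:                        # v2: leave suspended/buried cards alone
            return queue
        if ctype in (TYPE_LEARN, TYPE_RELEARN):
            # large due values are unix timestamps (intraday); small ones are day numbers
            return QUEUE_LEARN if due > 1_000_000_000 else QUEUE_DAY_LEARN
        return QUEUE_NEW if ctype == TYPE_NEW else QUEUE_REVIEW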
@@ -9,6 +9,7 @@ use serde::{de::DeserializeOwned, Serialize};
 use serde_aux::field_attributes::deserialize_bool_from_anything;
 use serde_derive::Deserialize;
 use serde_json::json;
+use serde_repr::{Deserialize_repr, Serialize_repr};
 use slog::warn;
 
 pub(crate) fn schema11_config_as_string() -> String {
@@ -40,6 +41,13 @@ pub(crate) enum ConfigKey {
     LocalOffset,
     CurrentNoteTypeID,
     NextNewCardPosition,
+    SchedulerVersion,
+}
+#[derive(PartialEq, Serialize_repr, Deserialize_repr, Clone, Copy)]
+#[repr(u8)]
+pub(crate) enum SchedulerVersion {
+    V1 = 1,
+    V2 = 2,
 }
 
 impl From<ConfigKey> for &'static str {
@@ -53,6 +61,7 @@ impl From<ConfigKey> for &'static str {
             ConfigKey::LocalOffset => "localOffset",
             ConfigKey::CurrentNoteTypeID => "curModel",
             ConfigKey::NextNewCardPosition => "nextPos",
+            ConfigKey::SchedulerVersion => "schedVer",
         }
     }
 }
@@ -142,6 +151,11 @@ impl Collection {
         self.set_config(ConfigKey::NextNewCardPosition, &pos.wrapping_add(1))?;
         Ok(pos)
     }
+
+    pub(crate) fn sched_ver(&self) -> SchedulerVersion {
+        self.get_config_optional(ConfigKey::SchedulerVersion)
+            .unwrap_or(SchedulerVersion::V1)
+    }
 }
 
 #[derive(Deserialize, PartialEq, Debug)]
@ -1,15 +1,41 @@
|
||||||
// Copyright: Ankitects Pty Ltd and contributors
|
// Copyright: Ankitects Pty Ltd and contributors
|
||||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
use crate::{collection::Collection, define_newtype, err::Result};
|
use crate::backend_proto as pb;
|
||||||
|
pub use crate::backend_proto::{
|
||||||
|
deck_kind::Kind as DeckKind, Deck as DeckProto, DeckCommon, DeckKind as DeckKindProto,
|
||||||
|
FilteredDeck, FilteredSearchOrder, FilteredSearchTerm, NormalDeck,
|
||||||
|
};
|
||||||
|
use crate::{
|
||||||
|
card::CardID,
|
||||||
|
collection::Collection,
|
||||||
|
define_newtype,
|
||||||
|
err::{AnkiError, Result},
|
||||||
|
text::normalize_to_nfc,
|
||||||
|
timestamp::TimestampSecs,
|
||||||
|
types::Usn,
|
||||||
|
};
|
||||||
mod schema11;
|
mod schema11;
|
||||||
pub use schema11::Deck;
|
pub use schema11::DeckSchema11;
|
||||||
use std::sync::Arc;
|
use std::{borrow::Cow, sync::Arc};
|
||||||
|
|
||||||
define_newtype!(DeckID, i64);
|
define_newtype!(DeckID, i64);
|
||||||
|
|
||||||
pub(crate) fn child_ids<'a>(decks: &'a [Deck], name: &str) -> impl Iterator<Item = DeckID> + 'a {
|
#[derive(Debug)]
|
||||||
|
pub struct Deck {
|
||||||
|
pub id: DeckID,
|
||||||
|
pub name: String,
|
||||||
|
pub mtime_secs: TimestampSecs,
|
||||||
|
pub usn: Usn,
|
||||||
|
pub common: DeckCommon,
|
||||||
|
pub kind: DeckKind,
|
||||||
|
}
|
||||||
|
|
||||||
|
// fixme: needs update
|
||||||
|
pub(crate) fn child_ids<'a>(
|
||||||
|
decks: &'a [DeckSchema11],
|
||||||
|
name: &str,
|
||||||
|
) -> impl Iterator<Item = DeckID> + 'a {
|
||||||
let prefix = format!("{}::", name.to_ascii_lowercase());
|
let prefix = format!("{}::", name.to_ascii_lowercase());
|
||||||
decks
|
decks
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -17,7 +43,8 @@ pub(crate) fn child_ids<'a>(decks: &'a [Deck], name: &str) -> impl Iterator<Item
|
||||||
.map(|d| d.id())
|
.map(|d| d.id())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn get_deck(decks: &[Deck], id: DeckID) -> Option<&Deck> {
|
// fixme: needs update
|
||||||
|
pub(crate) fn get_deck(decks: &[DeckSchema11], id: DeckID) -> Option<&DeckSchema11> {
|
||||||
for d in decks {
|
for d in decks {
|
||||||
if d.id() == id {
|
if d.id() == id {
|
||||||
return Some(d);
|
return Some(d);
|
||||||
|
@ -28,12 +55,93 @@ pub(crate) fn get_deck(decks: &[Deck], id: DeckID) -> Option<&Deck> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Deck {
|
impl Deck {
|
||||||
pub(crate) fn is_filtered(&self) -> bool {
|
pub fn new_normal() -> Deck {
|
||||||
matches!(self, Deck::Filtered(_))
|
let mut norm = NormalDeck::default();
|
||||||
|
norm.config_id = 1;
|
||||||
|
|
||||||
|
Deck {
|
||||||
|
id: DeckID(0),
|
||||||
|
name: "".into(),
|
||||||
|
mtime_secs: TimestampSecs(0),
|
||||||
|
usn: Usn(0),
|
||||||
|
common: DeckCommon::default(),
|
||||||
|
kind: DeckKind::Normal(norm),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn new_filtered() -> Deck {
|
||||||
|
let mut filt = FilteredDeck::default();
|
||||||
        filt.search_terms.push(FilteredSearchTerm {
            search: "".into(),
            limit: 100,
            order: 0,
        });
        filt.preview_delay = 10;
        filt.reschedule = true;
        Deck {
            id: DeckID(0),
            name: "".into(),
            mtime_secs: TimestampSecs(0),
            usn: Usn(0),
            common: DeckCommon::default(),
            kind: DeckKind::Filtered(filt),
        }
    }
}

impl Deck {
    pub(crate) fn is_filtered(&self) -> bool {
        matches!(self.kind, DeckKind::Filtered(_))
    }

    pub(crate) fn prepare_for_update(&mut self) {
        // fixme - we currently only do this when converting from human; should be done in pub methods instead

        // if self.name.contains(invalid_char_for_deck_component) {
        //     self.name = self.name.replace(invalid_char_for_deck_component, "");
        // }
        // ensure_string_in_nfc(&mut self.name);
    }

    // fixme: unify with prepare for update
    pub(crate) fn set_modified(&mut self, usn: Usn) {
        self.mtime_secs = TimestampSecs::now();
        self.usn = usn;
    }
}

// fixme: need to bump usn on upgrade if we rename
fn invalid_char_for_deck_component(c: char) -> bool {
    c.is_ascii_control() || c == '"'
}

fn normalized_deck_name_component(comp: &str) -> Cow<str> {
    let mut out = normalize_to_nfc(comp);
    if out.contains(invalid_char_for_deck_component) {
        out = out.replace(invalid_char_for_deck_component, "").into();
    }
    let trimmed = out.trim();
    if trimmed.is_empty() {
        "blank".into()
    } else if trimmed.len() != out.len() {
        // fixme: trimming leading/trailing spaces may break old clients if we don't bump mod
        trimmed.to_string().into()
    } else {
        out
    }
}

pub(crate) fn human_deck_name_to_native(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    for comp in name.split("::") {
        out.push_str(&normalized_deck_name_component(comp));
        out.push('\x1f');
    }
    out.trim_end_matches('\x1f').into()
}

impl Collection {
    // fixme: this cache may belong in CardGenContext?
    pub(crate) fn get_deck(&mut self, did: DeckID) -> Result<Option<Arc<Deck>>> {
        if let Some(deck) = self.state.deck_cache.get(&did) {
            return Ok(Some(deck.clone()));
@ -47,3 +155,312 @@ impl Collection {
        }
    }
}

impl From<Deck> for DeckProto {
    fn from(d: Deck) -> Self {
        DeckProto {
            id: d.id.0,
            name: d.name,
            mtime_secs: d.mtime_secs.0 as u32,
            usn: d.usn.0,
            common: Some(d.common),
            kind: Some(d.kind.into()),
        }
    }
}

impl From<DeckKind> for pb::deck::Kind {
    fn from(k: DeckKind) -> Self {
        match k {
            DeckKind::Normal(n) => pb::deck::Kind::Normal(n),
            DeckKind::Filtered(f) => pb::deck::Kind::Filtered(f),
        }
    }
}

fn immediate_parent_name(machine_name: &str) -> Option<&str> {
    machine_name.rsplitn(2, '\x1f').nth(1)
}
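// For example (mirroring the tests at the bottom of this file), '\x1f' is the
// stored hierarchy separator, and the parent is everything before the last
// separator:
//
//     assert_eq!(human_deck_name_to_native("foo::bar"), "foo\x1fbar");
//     assert_eq!(immediate_parent_name("foo\x1fbar"), Some("foo"));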

impl Collection {
    pub(crate) fn add_or_update_deck(&mut self, deck: &mut Deck, preserve_usn: bool) -> Result<()> {
        // fixme: vet cache clearing
        self.state.deck_cache.clear();

        self.transact(None, |col| {
            let usn = col.usn()?;

            deck.prepare_for_update();

            // fixme: bail
            assert!(!deck.name.contains("::"));

            // fixme: check deck name is not duplicate
            // handle blank deck name, etc

            if !preserve_usn {
                deck.set_modified(usn);
            }

            if deck.id.0 == 0 {
                col.match_or_create_parents(deck)?;
                col.storage.add_deck(deck)
            } else {
                if let Some(existing_deck) = col.storage.get_deck(deck.id)? {
                    if existing_deck.name != deck.name {
                        return col.update_renamed_deck(existing_deck, deck, usn);
                    }
                } else {
                    // fixme: this should only happen in the syncing case, and we should
                    // ensure there are no missing parents at the end of the sync
                }
                col.storage.update_deck(deck)
            }
        })
    }

    pub fn get_or_create_normal_deck(&mut self, human_name: &str) -> Result<Deck> {
        let native_name = human_deck_name_to_native(human_name);
        if let Some(did) = self.storage.get_deck_id(&native_name)? {
            self.storage.get_deck(did).map(|opt| opt.unwrap())
        } else {
            let mut deck = Deck::new_normal();
            deck.name = native_name;
            self.add_or_update_deck(&mut deck, false)?;
            Ok(deck)
        }
    }

    fn update_renamed_deck(&mut self, existing: Deck, updated: &mut Deck, usn: Usn) -> Result<()> {
        // new name should not conflict with a different deck
        if let Some(other_did) = self.storage.get_deck_id(&updated.name)? {
            if other_did != updated.id {
                // fixme: this could break when syncing
                return Err(AnkiError::Existing);
            }
        }

        self.match_or_create_parents(updated)?;
        self.storage.update_deck(updated)?;
        self.rename_child_decks(&existing, &updated.name, usn)
    }

    // fixme: make sure this handles foo::bar and FOO::baz
    fn rename_child_decks(&mut self, old: &Deck, new_name: &str, usn: Usn) -> Result<()> {
        let children = self.storage.child_decks(old)?;
        let old_component_count = old.name.matches('\x1f').count() + 1;

        for mut child in children {
            let child_components: Vec<_> = child.name.split('\x1f').collect();
            let child_only = &child_components[old_component_count..];
            let new_name = format!("{}\x1f{}", new_name, child_only.join("\x1f"));
            child.name = new_name;
            child.set_modified(usn);
            self.storage.update_deck(&child)?;
        }

        Ok(())
    }

    /// Add a single, normal deck with the provided name for a child deck.
    /// Caller must have done the necessary validation on the name.
    fn add_parent_deck(&self, machine_name: &str) -> Result<()> {
        let mut deck = Deck::new_normal();
        deck.name = machine_name.into();
        // fixme: undo
        self.storage.add_deck(&mut deck)
    }

    /// If parent deck(s) exist, rewrite name to match their case.
    /// If they don't exist, create them.
    /// Returns an error if a DB operation fails, or if the first existing parent is a filtered deck.
    fn match_or_create_parents(&mut self, deck: &mut Deck) -> Result<()> {
        let child_split: Vec<_> = deck.name.split('\x1f').collect();
        if let Some(parent_deck) = self.first_existing_parent(&deck.name, 0)? {
            if parent_deck.is_filtered() {
                return Err(AnkiError::DeckIsFiltered);
            }
            let parent_count = parent_deck.name.matches('\x1f').count() + 1;
            let need_create = parent_count != child_split.len() - 1;
            deck.name = format!(
                "{}\x1f{}",
                parent_deck.name,
                &child_split[parent_count..].join("\x1f")
            );
            if need_create {
                self.create_missing_parents(&deck.name)?;
            }
            Ok(())
        } else if child_split.len() == 1 {
            // no parents required
            Ok(())
        } else {
            // no existing parents
            self.create_missing_parents(&deck.name)
        }
    }
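    // Example (matching the adding_updating() test below): if "foo" already
    // exists and a deck named "FOO\x1fbar\x1fbaz" is added, the first existing
    // parent is "foo", so the new name is rewritten to "foo\x1fbar\x1fbaz" and
    // the missing intermediate parent "foo\x1fbar" is then created.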

    fn create_missing_parents(&self, mut machine_name: &str) -> Result<()> {
        while let Some(parent_name) = immediate_parent_name(machine_name) {
            if self.storage.get_deck_id(parent_name)?.is_none() {
                self.add_parent_deck(parent_name)?;
            }
            machine_name = parent_name;
        }
        Ok(())
    }

    fn first_existing_parent(
        &self,
        machine_name: &str,
        recursion_level: usize,
    ) -> Result<Option<Deck>> {
        if recursion_level > 10 {
            return Err(AnkiError::invalid_input("deck nesting level too deep"));
        }
        if let Some(parent_name) = immediate_parent_name(machine_name) {
            if let Some(parent_did) = self.storage.get_deck_id(parent_name)? {
                self.storage.get_deck(parent_did)
            } else {
                self.first_existing_parent(parent_name, recursion_level + 1)
            }
        } else {
            Ok(None)
        }
    }

    /// Get a deck based on its human name. If you have a machine name,
    /// use the method in storage instead.
    pub(crate) fn get_deck_id(&self, human_name: &str) -> Result<Option<DeckID>> {
        let machine_name = human_deck_name_to_native(&human_name);
        self.storage.get_deck_id(&machine_name)
    }

    pub fn remove_deck_and_child_decks(&mut self, did: DeckID) -> Result<()> {
        self.transact(None, |col| {
            let usn = col.usn()?;

            if let Some(deck) = col.storage.get_deck(did)? {
                let child_decks = col.storage.child_decks(&deck)?;

                // top level
                col.remove_single_deck(&deck, usn)?;

                // remove children
                for deck in child_decks {
                    col.remove_single_deck(&deck, usn)?;
                }
            }
            Ok(())
        })
    }

    pub(crate) fn remove_single_deck(&mut self, deck: &Deck, usn: Usn) -> Result<()> {
        // fixme: undo
        match deck.kind {
            DeckKind::Normal(_) => self.delete_all_cards_in_normal_deck(deck.id)?,
            DeckKind::Filtered(_) => self.return_all_cards_in_filtered_deck(deck.id)?,
        }
        self.storage.remove_deck(deck.id)?;
        self.storage.add_deck_grave(deck.id, usn)
    }

    fn delete_all_cards_in_normal_deck(&mut self, did: DeckID) -> Result<()> {
        // fixme: need to search on odid as well
        // fixme: the odid requirement will require a table scan, which will be slow when deleting a large tree
        let cids = self.storage.all_cards_in_single_deck(did)?;
        self.remove_cards_inner(&cids)
    }

    fn return_all_cards_in_filtered_deck(&mut self, did: DeckID) -> Result<()> {
        let cids = self.storage.all_cards_in_single_deck(did)?;
        self.return_cards_to_home_deck(&cids)
    }

    fn return_cards_to_home_deck(&mut self, cids: &[CardID]) -> Result<()> {
        let sched = self.sched_ver();
        for cid in cids {
            if let Some(mut card) = self.storage.get_card(*cid)? {
                // fixme: undo
                card.return_home(sched);
                self.storage.update_card(&card)?;
            }
        }
        Ok(())
    }
}

#[cfg(test)]
mod test {
    use super::{human_deck_name_to_native, immediate_parent_name};
    use crate::{
        collection::{open_test_collection, Collection},
        err::Result,
    };

    fn sorted_names(col: &Collection) -> Vec<String> {
        col.storage
            .get_all_deck_names()
            .unwrap()
            .into_iter()
            .map(|d| d.1)
            .collect()
    }

    #[test]
    fn parent() {
        assert_eq!(immediate_parent_name("foo"), None);
        assert_eq!(immediate_parent_name("foo\x1fbar"), Some("foo"));
        assert_eq!(
            immediate_parent_name("foo\x1fbar\x1fbaz"),
            Some("foo\x1fbar")
        );
    }

    #[test]
    fn from_human() {
        assert_eq!(&human_deck_name_to_native("foo"), "foo");
        assert_eq!(&human_deck_name_to_native("foo::bar"), "foo\x1fbar");
        assert_eq!(&human_deck_name_to_native("fo\x1fo::ba\nr"), "foo\x1fbar");
        assert_eq!(
            &human_deck_name_to_native("foo::::baz"),
            "foo\x1fblank\x1fbaz"
        );
    }

    #[test]
    fn adding_updating() -> Result<()> {
        let mut col = open_test_collection();

        let deck1 = col.get_or_create_normal_deck("foo")?;
        let deck2 = col.get_or_create_normal_deck("FOO")?;
        assert_eq!(deck1.id, deck2.id);
        assert_eq!(sorted_names(&col), vec!["Default", "foo"]);

        // missing parents should be automatically created, and case should match
        // existing parents
        let _deck3 = col.get_or_create_normal_deck("FOO::BAR::BAZ")?;
        assert_eq!(
            sorted_names(&col),
            vec!["Default", "foo", "foo::BAR", "foo::BAR::BAZ"]
        );

        Ok(())
    }

    #[test]
    fn renaming() -> Result<()> {
        let mut col = open_test_collection();

        let _ = col.get_or_create_normal_deck("foo::bar::baz")?;
        let mut top_deck = col.get_or_create_normal_deck("foo")?;
        top_deck.name = "other".into();
        col.add_or_update_deck(&mut top_deck, false)?;
        assert_eq!(
            sorted_names(&col),
            vec!["Default", "other", "other::bar", "other::bar::baz"]
        );

        Ok(())
    }
}

@ -2,11 +2,16 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::DeckID;
use super::{
    human_deck_name_to_native, Deck, DeckCommon, DeckKind, FilteredDeck, FilteredSearchTerm,
    NormalDeck,
};
use crate::{
    serde::{default_on_invalid, deserialize_bool_from_anything, deserialize_number_from_string},
    timestamp::TimestampSecs,
    types::Usn,
};

use serde_derive::{Deserialize, Serialize};
use serde_json::Value;
use serde_tuple::Serialize_tuple;
@ -14,19 +19,19 @@ use std::collections::HashMap;

#[derive(Serialize, PartialEq, Debug, Clone)]
#[serde(untagged)]
-pub enum Deck {
+pub enum DeckSchema11 {
-    Normal(NormalDeck),
+    Normal(NormalDeckSchema11),
-    Filtered(FilteredDeck),
+    Filtered(FilteredDeckSchema11),
}

// serde doesn't support integer/bool enum tags, so we manually pick the correct variant
mod dynfix {
-    use super::{Deck, FilteredDeck, NormalDeck};
+    use super::{DeckSchema11, FilteredDeckSchema11, NormalDeckSchema11};
    use serde::de::{self, Deserialize, Deserializer};
    use serde_json::{Map, Value};

-    impl<'de> Deserialize<'de> for Deck {
+    impl<'de> Deserialize<'de> for DeckSchema11 {
-        fn deserialize<D>(deserializer: D) -> Result<Deck, D::Error>
+        fn deserialize<D>(deserializer: D) -> Result<DeckSchema11, D::Error>
        where
            D: Deserializer<'de>,
        {
@ -58,12 +63,12 @@ mod dynfix {

        let rest = Value::Object(map);
        if is_dyn {
-            FilteredDeck::deserialize(rest)
+            FilteredDeckSchema11::deserialize(rest)
-                .map(Deck::Filtered)
+                .map(DeckSchema11::Filtered)
                .map_err(de::Error::custom)
        } else {
-            NormalDeck::deserialize(rest)
+            NormalDeckSchema11::deserialize(rest)
-                .map(Deck::Normal)
+                .map(DeckSchema11::Normal)
                .map_err(de::Error::custom)
        }
    }
@ -71,7 +76,7 @@ mod dynfix {
}

#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
-pub struct DeckCommon {
+pub struct DeckCommonSchema11 {
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub(crate) id: DeckID,
    #[serde(
@ -83,7 +88,7 @@ pub struct DeckCommon {
    pub(crate) name: String,
    pub(crate) usn: Usn,
    #[serde(flatten)]
-    pub(crate) today: DeckToday,
+    pub(crate) today: DeckTodaySchema11,
    collapsed: bool,
    #[serde(default)]
    desc: String,
@ -95,9 +100,9 @@ pub struct DeckCommon {

#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(rename_all = "camelCase")]
-pub struct NormalDeck {
+pub struct NormalDeckSchema11 {
    #[serde(flatten)]
-    pub(crate) common: DeckCommon,
+    pub(crate) common: DeckCommonSchema11,

    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub(crate) conf: i64,
@ -109,13 +114,13 @@ pub struct NormalDeck {

#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(rename_all = "camelCase")]
-pub struct FilteredDeck {
+pub struct FilteredDeckSchema11 {
    #[serde(flatten)]
-    common: DeckCommon,
+    common: DeckCommonSchema11,

    #[serde(deserialize_with = "deserialize_bool_from_anything")]
    resched: bool,
-    terms: Vec<FilteredSearch>,
+    terms: Vec<FilteredSearchTermSchema11>,

    // unused, but older clients require its existence
    #[serde(default)]
@ -127,50 +132,50 @@ pub struct FilteredDeck {

    // new scheduler
    #[serde(default)]
-    preview_delay: u16,
+    preview_delay: u32,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Default, Clone)]
-pub struct DeckToday {
+pub struct DeckTodaySchema11 {
    #[serde(rename = "lrnToday")]
-    pub(crate) lrn: TodayAmount,
+    pub(crate) lrn: TodayAmountSchema11,
    #[serde(rename = "revToday")]
-    pub(crate) rev: TodayAmount,
+    pub(crate) rev: TodayAmountSchema11,
    #[serde(rename = "newToday")]
-    pub(crate) new: TodayAmount,
+    pub(crate) new: TodayAmountSchema11,
    #[serde(rename = "timeToday")]
-    pub(crate) time: TodayAmount,
+    pub(crate) time: TodayAmountSchema11,
}

#[derive(Serialize_tuple, Deserialize, Debug, PartialEq, Default, Clone)]
#[serde(from = "Vec<Value>")]
-pub struct TodayAmount {
+pub struct TodayAmountSchema11 {
    day: i32,
    amount: i32,
}

-impl From<Vec<Value>> for TodayAmount {
+impl From<Vec<Value>> for TodayAmountSchema11 {
    fn from(mut v: Vec<Value>) -> Self {
        let amt = v.pop().and_then(|v| v.as_i64()).unwrap_or(0);
        let day = v.pop().and_then(|v| v.as_i64()).unwrap_or(0);
-        TodayAmount {
+        TodayAmountSchema11 {
            amount: amt as i32,
            day: day as i32,
        }
    }
}
#[derive(Serialize_tuple, Deserialize, Debug, PartialEq, Clone)]
-pub struct FilteredSearch {
+pub struct FilteredSearchTermSchema11 {
    search: String,
    #[serde(deserialize_with = "deserialize_number_from_string")]
    limit: i32,
-    order: i8,
+    order: i32,
}

-impl Deck {
+impl DeckSchema11 {
-    pub fn common(&self) -> &DeckCommon {
+    pub fn common(&self) -> &DeckCommonSchema11 {
        match self {
-            Deck::Normal(d) => &d.common,
+            DeckSchema11::Normal(d) => &d.common,
-            Deck::Filtered(d) => &d.common,
+            DeckSchema11::Filtered(d) => &d.common,
        }
    }

@ -190,16 +195,16 @@ impl Deck {
    }
}

-impl Default for Deck {
+impl Default for DeckSchema11 {
    fn default() -> Self {
-        Deck::Normal(NormalDeck::default())
+        DeckSchema11::Normal(NormalDeckSchema11::default())
    }
}

-impl Default for NormalDeck {
+impl Default for NormalDeckSchema11 {
    fn default() -> Self {
-        NormalDeck {
+        NormalDeckSchema11 {
-            common: DeckCommon {
+            common: DeckCommonSchema11 {
                id: DeckID(0),
                mtime: TimestampSecs(0),
                name: "".to_string(),
@ -216,3 +221,169 @@ impl Default for NormalDeck {
            }
        }
    }

// schema 11 -> latest

impl From<DeckSchema11> for Deck {
    fn from(deck: DeckSchema11) -> Self {
        match deck {
            DeckSchema11::Normal(d) => Deck {
                id: d.common.id,
                name: human_deck_name_to_native(&d.common.name),
                mtime_secs: d.common.mtime,
                usn: d.common.usn,
                common: (&d.common).into(),
                kind: DeckKind::Normal(d.into()),
            },
            DeckSchema11::Filtered(d) => Deck {
                id: d.common.id,
                name: human_deck_name_to_native(&d.common.name),
                mtime_secs: d.common.mtime,
                usn: d.common.usn,
                common: (&d.common).into(),
                kind: DeckKind::Filtered(d.into()),
            },
        }
    }
}

impl From<&DeckCommonSchema11> for DeckCommon {
    fn from(common: &DeckCommonSchema11) -> Self {
        let other = if common.other.is_empty() {
            vec![]
        } else {
            serde_json::to_vec(&common.other).unwrap_or_default()
        };
        DeckCommon {
            collapsed: common.collapsed,
            last_day_studied: common.today.new.day as u32,
            new_studied: common.today.new.amount,
            review_studied: common.today.rev.amount,
            learning_studied: common.today.lrn.amount,
            secs_studied: common.today.time.amount,
            other,
        }
    }
}

impl From<NormalDeckSchema11> for NormalDeck {
    fn from(deck: NormalDeckSchema11) -> Self {
        NormalDeck {
            config_id: deck.conf,
            extend_new: deck.extend_new.max(0) as u32,
            extend_review: deck.extend_rev.max(0) as u32,
            description: deck.common.desc,
        }
    }
}

impl From<FilteredDeckSchema11> for FilteredDeck {
    fn from(deck: FilteredDeckSchema11) -> Self {
        FilteredDeck {
            reschedule: deck.resched,
            search_terms: deck.terms.into_iter().map(Into::into).collect(),
            delays: deck.delays.unwrap_or_default(),
            preview_delay: deck.preview_delay,
        }
    }
}

impl From<FilteredSearchTermSchema11> for FilteredSearchTerm {
    fn from(term: FilteredSearchTermSchema11) -> Self {
        FilteredSearchTerm {
            search: term.search,
            limit: term.limit.max(0) as u32,
            order: term.order,
        }
    }
}

// latest -> schema 11

impl From<Deck> for DeckSchema11 {
    fn from(deck: Deck) -> Self {
        match deck.kind {
            DeckKind::Normal(ref norm) => DeckSchema11::Normal(NormalDeckSchema11 {
                conf: norm.config_id,
                extend_new: norm.extend_new as i32,
                extend_rev: norm.extend_review as i32,
                common: deck.into(),
            }),
            DeckKind::Filtered(ref filt) => DeckSchema11::Filtered(FilteredDeckSchema11 {
                resched: filt.reschedule,
                terms: filt.search_terms.iter().map(|v| v.clone().into()).collect(),
                separate: true,
                delays: if filt.delays.is_empty() {
                    None
                } else {
                    Some(filt.delays.clone())
                },
                preview_delay: filt.preview_delay as u32,
                common: deck.into(),
            }),
        }
    }
}

impl From<Deck> for DeckCommonSchema11 {
    fn from(deck: Deck) -> Self {
        let other: HashMap<String, Value> = if deck.common.other.is_empty() {
            Default::default()
        } else {
            serde_json::from_slice(&deck.common.other).unwrap_or_default()
        };
        DeckCommonSchema11 {
            id: deck.id,
            mtime: deck.mtime_secs,
            name: deck.name.replace("\x1f", "::"),
            usn: deck.usn,
            today: (&deck).into(),
            collapsed: deck.common.collapsed,
            dynamic: if matches!(deck.kind, DeckKind::Filtered(_)) {
                1
            } else {
                0
            },
            desc: match deck.kind {
                DeckKind::Normal(n) => n.description,
                DeckKind::Filtered(_) => String::new(),
            },
            other,
        }
    }
}

impl From<&Deck> for DeckTodaySchema11 {
    fn from(deck: &Deck) -> Self {
        let day = deck.common.last_day_studied as i32;
        let c = &deck.common;
        DeckTodaySchema11 {
            lrn: TodayAmountSchema11 {
                day,
                amount: c.learning_studied,
            },
            rev: TodayAmountSchema11 {
                day,
                amount: c.review_studied,
            },
            new: TodayAmountSchema11 {
                day,
                amount: c.new_studied,
            },
            time: TodayAmountSchema11 {
                day,
                amount: c.secs_studied,
            },
        }
    }
}

impl From<FilteredSearchTerm> for FilteredSearchTermSchema11 {
    fn from(term: FilteredSearchTerm) -> Self {
        FilteredSearchTermSchema11 {
            search: term.search,
            limit: term.limit as i32,
            order: term.order,
        }
    }
}

@ -48,6 +48,12 @@ pub enum AnkiError {

    #[fail(display = "A requested item was not found.")]
    NotFound,

    #[fail(display = "The provided item already exists.")]
    Existing,

    #[fail(display = "Unable to place item in/under a filtered deck.")]
    DeckIsFiltered,
}

// error helpers

@ -220,6 +220,16 @@ impl Collection {

        Ok(())
    }

    /// Remove a note. Cards must already have been deleted.
    pub(crate) fn remove_note_only(&mut self, nid: NoteID, usn: Usn) -> Result<()> {
        if let Some(_note) = self.storage.get_note(nid)? {
            // fixme: undo
            self.storage.remove_note(nid)?;
            self.storage.add_note_grave(nid, usn)?;
        }
        Ok(())
    }
}

#[cfg(test)]

@ -289,11 +289,8 @@ impl Collection {

    /// If the deck exists and is a normal deck, return it.
    fn deck_id_if_normal(&mut self, did: DeckID) -> Option<DeckID> {
-        // fixme: currently disabled until deck writes are immediate
-        return Some(did);
        self.get_deck(did)
            .ok()
-            .and_then(|opt| opt.and_then(|d| if !d.is_filtered() { Some(d.id()) } else { None }))
+            .and_then(|opt| opt.and_then(|d| if !d.is_filtered() { Some(d.id) } else { None }))
    }
}

@ -92,8 +92,7 @@ fn write_order(sql: &mut String, kind: &SortKind, reverse: bool) -> Result<()> {
    Ok(())
}

-// In the future these items should be moved from JSON into separate SQL tables,
+// fixme: use the new tables
-// - for now we use a temporary deck to sort them.
fn prepare_sort(col: &mut Collection, kind: &SortKind) -> Result<()> {
    use SortKind::*;
    match kind {
@ -106,7 +105,7 @@ fn prepare_sort(col: &mut Collection, kind: &SortKind) -> Result<()> {

    match kind {
        CardDeck => {
-            for (k, v) in col.storage.get_all_decks()? {
+            for (k, v) in col.storage.get_all_decks_as_schema11()? {
                stmt.execute(params![k, v.name()])?;
            }
        }

@ -221,6 +221,7 @@ impl SqlWriter<'_> {
        Ok(())
    }

    // fixme: update for new table
    fn write_deck(&mut self, deck: &str) -> Result<()> {
        match deck {
            "*" => write!(self.sql, "true").unwrap(),
@ -229,7 +230,7 @@ impl SqlWriter<'_> {
            let all_decks: Vec<_> = self
                .col
                .storage
-                .get_all_decks()?
+                .get_all_decks_as_schema11()?
                .into_iter()
                .map(|(_, v)| v)
                .collect();

@ -111,6 +111,13 @@ impl super::SqliteStorage {
        card.id = CardID(self.db.last_insert_rowid());
        Ok(())
    }

    pub(crate) fn remove_card(&self, cid: CardID) -> Result<()> {
        self.db
            .prepare_cached("delete from cards where id = ?")?
            .execute(&[cid])?;
        Ok(())
    }
}

#[cfg(test)]

rslib/src/storage/deck/add_deck.sql (Normal file, 1 line)
@ -0,0 +1 @@
insert into decks

rslib/src/storage/deck/alloc_id.sql (Normal file, 13 lines)
@ -0,0 +1,13 @@
select
  case
    when ?1 in (
      select
        id
      from decks
    ) then (
      select
        max(id) + 1
      from decks
    )
    else ?1
  end;
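-- ?1 is the caller's proposed id (the current time in milliseconds; see
-- add_deck() in storage/deck/mod.rs). If a deck already uses that id, the
-- query falls back to max(id) + 1 so the allocated id stays unique.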

rslib/src/storage/deck/get_deck.sql (Normal file, 8 lines)
@ -0,0 +1,8 @@
select
  name,
  id,
  mtime_secs,
  usn,
  common,
  kind
from decks
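-- Column order matters: row_to_deck() in storage/deck/mod.rs reads the columns
-- by position (0 = name, 1 = id, 2 = mtime_secs, 3 = usn, 4 = common, 5 = kind).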

@ -3,29 +3,200 @@

use super::SqliteStorage;
use crate::{
-    decks::{Deck, DeckID},
    card::CardID,
-    err::Result,
    decks::{Deck, DeckCommon, DeckID, DeckKindProto, DeckSchema11},
    err::{AnkiError, DBErrorKind, Result},
    i18n::{I18n, TR},
    timestamp::TimestampMillis,
};
-use rusqlite::NO_PARAMS;
use prost::Message;
-use std::collections::HashMap;
use rusqlite::{params, Row, NO_PARAMS};
use std::collections::{HashMap, HashSet};
use unicase::UniCase;

-impl SqliteStorage {
fn row_to_deck(row: &Row) -> Result<Deck> {
-    pub(crate) fn get_all_decks(&self) -> Result<HashMap<DeckID, Deck>> {
    let common = DeckCommon::decode(row.get_raw(4).as_blob()?)?;
-        self.db
    let kind = DeckKindProto::decode(row.get_raw(5).as_blob()?)?;
-            .query_row_and_then("select decks from col", NO_PARAMS, |row| -> Result<_> {
    let id = row.get(1)?;
-                Ok(serde_json::from_str(row.get_raw(0).as_str()?)?)
    Ok(Deck {
        id,
        name: row.get(0)?,
        mtime_secs: row.get(2)?,
        usn: row.get(3)?,
        common,
        kind: kind.kind.ok_or_else(|| AnkiError::DBError {
            kind: DBErrorKind::MissingEntity,
            info: format!("invalid deck kind: {}", id),
        })?,
    })
}

-    pub(crate) fn set_all_decks(&self, decks: HashMap<DeckID, Deck>) -> Result<()> {
impl SqliteStorage {
    pub(crate) fn get_all_decks_as_schema11(&self) -> Result<HashMap<DeckID, DeckSchema11>> {
        self.get_all_decks()
            .map(|r| r.into_iter().map(|d| (d.id, d.into())).collect())
    }

    pub(crate) fn get_deck(&self, did: DeckID) -> Result<Option<Deck>> {
        self.db
            .prepare_cached(concat!(include_str!("get_deck.sql"), " where id = ?"))?
            .query_and_then(&[did], row_to_deck)?
            .next()
            .transpose()
    }

    pub(crate) fn get_all_decks(&self) -> Result<Vec<Deck>> {
        self.db
            .prepare(include_str!("get_deck.sql"))?
            .query_and_then(NO_PARAMS, row_to_deck)?
            .collect()
    }

    /// Get all deck names in human-readable form (::)
    pub(crate) fn get_all_deck_names(&self) -> Result<Vec<(DeckID, String)>> {
        self.db
            .prepare("select name, id from decks")?
            .query_and_then(NO_PARAMS, |row| {
                Ok((row.get(1)?, row.get_raw(0).as_str()?.replace('\x1f', "::")))
            })?
            .collect()
    }

    pub(crate) fn get_deck_id(&self, machine_name: &str) -> Result<Option<DeckID>> {
        self.db
            .prepare("select id from decks where name = ?")?
            .query_and_then(&[machine_name], |row| row.get(0))?
            .next()
            .transpose()
            .map_err(Into::into)
    }

    // caller should ensure name unique
    pub(crate) fn add_deck(&self, deck: &mut Deck) -> Result<()> {
        assert!(deck.id.0 == 0);
        deck.id.0 = self
            .db
            .prepare(include_str!("alloc_id.sql"))?
            .query_row(&[TimestampMillis::now()], |r| r.get(0))?;
        self.update_deck(deck).map_err(|err| {
            // restore id of 0
            deck.id.0 = 0;
            err
        })
    }

    // fixme: bail instead of assert
    pub(crate) fn update_deck(&self, deck: &Deck) -> Result<()> {
        assert!(deck.id.0 != 0);
        let mut stmt = self.db.prepare_cached(include_str!("update_deck.sql"))?;
        let mut common = vec![];
        deck.common.encode(&mut common)?;
        let kind_enum = DeckKindProto {
            kind: Some(deck.kind.clone()),
        };
        let mut kind = vec![];
        kind_enum.encode(&mut kind)?;
        stmt.execute(params![
            deck.name,
            deck.id,
            deck.mtime_secs,
            deck.usn,
            common,
            kind
        ])?;

        Ok(())
    }

    pub(crate) fn remove_deck(&self, did: DeckID) -> Result<()> {
        self.db
            .prepare_cached("delete from decks where id = ?")?
            .execute(&[did])?;
        Ok(())
    }

    pub(crate) fn all_cards_in_single_deck(&self, did: DeckID) -> Result<Vec<CardID>> {
        self.db
            .prepare_cached("select id from cards where did = ?")?
            .query_and_then(&[did], |r| r.get(0).map_err(Into::into))?
            .collect()
    }

    pub(crate) fn child_decks(&self, parent: &Deck) -> Result<Vec<Deck>> {
        let prefix_start = format!("{}\x1f", parent.name);
        let prefix_end = format!("{}\x20", parent.name);
        self.db
            .prepare_cached(concat!(
                include_str!("get_deck.sql"),
                " where name >= ? and name < ?"
            ))?
            .query_and_then(&[prefix_start, prefix_end], row_to_deck)?
            .collect()
    }
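    // '\x1f' is the deck name separator and '\x20' (space) is the next code
    // point, so the half-open range ["parent\x1f", "parent\x20") matches
    // exactly the names that start with "parent\x1f", i.e. all descendants of
    // `parent`.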

    // Upgrading/downgrading/legacy

    // pub(crate) fn get_all_decks_as_schema11(&self) -> Result<HashMap<DeckID, DeckSchema11>> {
    //     let mut nts = HashMap::new();
    //     for (ntid, _name) in self.get_all_deck_names()? {
    //         let full = self.get_deck(ntid)?.unwrap();
    //         nts.insert(ntid, full.into());
    //     }
    //     Ok(nts)
    // }

    pub(super) fn add_default_deck(&self, i18n: &I18n) -> Result<()> {
        let mut deck = Deck::new_normal();
        deck.id.0 = 1;
        // fixme: separate key
        deck.name = i18n.tr(TR::DeckConfigDefaultName).into();
        self.update_deck(&deck)
    }

    // fixme: make sure conflicting deck names don't break things
    pub(crate) fn upgrade_decks_to_schema15(&self) -> Result<()> {
        let decks = self.get_schema11_decks()?;
        let mut names = HashSet::new();
        for (_id, deck) in decks {
            let mut deck = Deck::from(deck);
            loop {
                let name = UniCase::new(deck.name.clone());
                if !names.contains(&name) {
                    names.insert(name);
                    break;
                }
                deck.name.push('_');
            }
            self.update_deck(&deck)?;
        }
        self.db.execute("update col set decks = ''", NO_PARAMS)?;
        Ok(())
    }

    pub(crate) fn downgrade_decks_from_schema15(&self) -> Result<()> {
        let decks = self.get_all_decks_as_schema11()?;
        self.set_schema11_decks(decks)
    }

    fn get_schema11_decks(&self) -> Result<HashMap<DeckID, DeckSchema11>> {
        let mut stmt = self.db.prepare("select decks from col")?;
        let decks = stmt
            .query_and_then(NO_PARAMS, |row| -> Result<HashMap<DeckID, DeckSchema11>> {
                let v: HashMap<DeckID, DeckSchema11> =
                    serde_json::from_str(row.get_raw(0).as_str()?)?;
                Ok(v)
            })?
            .next()
            .ok_or_else(|| AnkiError::DBError {
                info: "col table empty".to_string(),
                kind: DBErrorKind::MissingEntity,
            })??;
        Ok(decks)
    }

    pub(crate) fn set_schema11_decks(&self, decks: HashMap<DeckID, DeckSchema11>) -> Result<()> {
        let json = serde_json::to_string(&decks)?;
        self.db.execute("update col set decks = ?", &[json])?;
        Ok(())
    }

-    pub(crate) fn get_deck(&self, did: DeckID) -> Result<Option<Deck>> {
-        // fixme: this is just temporary until we create an extra table
-        let mut decks = self.get_all_decks()?;
-        Ok(decks.remove(&did))
-    }
}

rslib/src/storage/deck/update_deck.sql (Normal file, 4 lines)
@ -0,0 +1,4 @@
insert
  or replace into decks (name, id, mtime_secs, usn, common, kind)
values
  (?, ?, ?, ?, ?, ?)

rslib/src/storage/graves/add.sql (Normal file, 4 lines)
@ -0,0 +1,4 @@
insert
  or ignore into graves (usn, oid, type)
values
  (?, ?, ?)

rslib/src/storage/graves/mod.rs (Normal file, 39 lines)
@ -0,0 +1,39 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::SqliteStorage;
use crate::{card::CardID, decks::DeckID, err::Result, notes::NoteID, types::Usn};
use rusqlite::{params, NO_PARAMS};

enum GraveKind {
    Card,
    Note,
    Deck,
}

impl SqliteStorage {
    fn add_grave(&self, oid: i64, kind: GraveKind, usn: Usn) -> Result<()> {
        self.db
            .prepare_cached(include_str!("add.sql"))?
            .execute(params![usn, oid, kind as u8])?;
        Ok(())
    }

    #[allow(dead_code)]
    pub(crate) fn clear_all_graves(&self) -> Result<()> {
        self.db.execute("delete from graves", NO_PARAMS)?;
        Ok(())
    }

    pub(crate) fn add_card_grave(&self, cid: CardID, usn: Usn) -> Result<()> {
        self.add_grave(cid.0, GraveKind::Card, usn)
    }

    pub(crate) fn add_note_grave(&self, nid: NoteID, usn: Usn) -> Result<()> {
        self.add_grave(nid.0, GraveKind::Note, usn)
    }

    pub(crate) fn add_deck_grave(&self, did: DeckID, usn: Usn) -> Result<()> {
        self.add_grave(did.0, GraveKind::Deck, usn)
    }
}
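// Note: `kind as u8` in add_grave() relies on the discriminants Card = 0,
// Note = 1, Deck = 2, which is presumably chosen to match the values already
// stored in the graves `type` column by the schema 11 format.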

@ -5,6 +5,7 @@ mod card;
mod config;
mod deck;
mod deckconf;
mod graves;
mod note;
mod notetype;
mod sqlite;

rslib/src/storage/note/is_orphaned.sql (Normal file, 5 lines)
@ -0,0 +1,5 @@
select
  count(id) = 0
from cards
where
  nid = ?;

@ -74,4 +74,18 @@ impl super::SqliteStorage {
        note.id.0 = self.db.last_insert_rowid();
        Ok(())
    }

    pub(crate) fn remove_note(&self, nid: NoteID) -> Result<()> {
        self.db
            .prepare_cached("delete from notes where id = ?")?
            .execute(&[nid])?;
        Ok(())
    }

    pub(crate) fn note_is_orphaned(&self, nid: NoteID) -> Result<bool> {
        self.db
            .prepare_cached(include_str!("is_orphaned.sql"))?
            .query_row(&[nid], |r| r.get(0))
            .map_err(Into::into)
    }
}

@ -198,6 +198,7 @@ impl SqliteStorage {

        if create {
            storage.add_default_deck_config(i18n)?;
            storage.add_default_deck(i18n)?;
            storage.add_stock_notetypes(i18n)?;
        }

@ -17,6 +17,7 @@ impl SqliteStorage {
            self.db
                .execute_batch(include_str!("schema15_upgrade.sql"))?;
            self.upgrade_notetypes_to_schema15()?;
            self.upgrade_decks_to_schema15()?;
        }

        Ok(())
@ -25,6 +26,7 @@ impl SqliteStorage {
    pub(super) fn downgrade_to_schema_11(&self) -> Result<()> {
        self.begin_trx()?;

        self.downgrade_decks_from_schema15()?;
        self.downgrade_notetypes_from_schema15()?;
        self.downgrade_config_from_schema14()?;
        self.downgrade_tags_from_schema14()?;

@ -4,6 +4,7 @@ drop table tags;
drop table fields;
drop table templates;
drop table notetypes;
drop table decks;
update col
set
  ver = 11;

@ -26,6 +26,15 @@ create table notetypes (
);
create unique index idx_notetypes_name on notetypes (name);
create index idx_notetypes_usn on notetypes (usn);
create table decks (
  name text not null primary key collate unicase,
  id integer not null,
  mtime_secs integer not null,
  usn integer not null,
  common bytes not null,
  kind bytes not null
);
create unique index idx_decks_id on decks (id);
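-- The machine name (components separated by 0x1f) is the primary key and is
-- compared with the case-insensitive unicase collation; `common` and `kind`
-- hold the protobuf-encoded DeckCommon and deck-kind messages written by
-- update_deck() in storage/deck/mod.rs.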
update col
set
  ver = 15;