Mirror of https://github.com/ankitects/anki.git
store config in separate DB table

- mtime is tracked on each key individually, which will allow
  merging of config changes when syncing in the future
- added col.(get|set|remove)_config()
- in order to support existing code that was mutating returned
  values (eg col.conf["something"]["another"] = 5), the returned list/dict
  will be automatically wrapped so that when the value is dropped, it
  will save the mutated item back to the DB if it has changed. Code that
  fetches lists/dicts from the config like so:
      col.conf["foo"]["bar"] = baz
      col.setMod()
  will continue to work in most cases, but should be gradually updated to:
      conf = col.get_config("foo")
      conf["bar"] = baz
      col.set_config("foo", conf)
This commit is contained in:
parent 8b76098bc7
commit 676f4e74a8

24 changed files with 539 additions and 125 deletions
@@ -61,6 +61,10 @@ message BackendInput {
     string canonify_tags = 49;
     Empty all_tags = 50;
     int32 get_changed_tags = 51;
+    string get_config_json = 52;
+    SetConfigJson set_config_json = 53;
+    string set_all_config = 54;
+    Empty get_all_config = 55;
   }
 }

@@ -105,6 +109,10 @@ message BackendOutput {
     CanonifyTagsOut canonify_tags = 49;
     AllTagsOut all_tags = 50;
     GetChangedTagsOut get_changed_tags = 51;
+    string get_config_json = 52;
+    Empty set_config_json = 53;
+    Empty set_all_config = 54;
+    string get_all_config = 55;

     BackendError error = 2047;
   }
@@ -454,3 +462,11 @@ message CanonifyTagsOut {
     string tags = 1;
     bool tag_list_changed = 2;
 }
+
+message SetConfigJson {
+    string key = 1;
+    oneof op {
+        string val = 2;
+        Empty remove = 3;
+    }
+}
@@ -5,7 +5,6 @@ from __future__ import annotations

 import copy
 import datetime
-import json
 import os
 import pprint
 import random
@@ -22,6 +21,7 @@ import anki.latex # sets up hook
 import anki.template
 from anki import hooks
 from anki.cards import Card
+from anki.config import ConfigManager
 from anki.consts import *
 from anki.dbproxy import DBProxy
 from anki.decks import DeckManager
@@ -45,25 +45,6 @@ from anki.utils import (
     stripHTMLMedia,
 )

-defaultConf = {
-    # review options
-    "activeDecks": [1],
-    "curDeck": 1,
-    "newSpread": NEW_CARDS_DISTRIBUTE,
-    "collapseTime": 1200,
-    "timeLim": 0,
-    "estTimes": True,
-    "dueCounts": True,
-    # other config
-    "curModel": None,
-    "nextPos": 1,
-    "sortType": "noteFld",
-    "sortBackwards": False,
-    "addToCur": True,  # add new to currently selected deck?
-    "dayLearnFirst": False,
-    "schedVer": 1,
-}
-

 # this is initialized by storage.Collection
 class _Collection:
@@ -75,7 +56,6 @@ class _Collection:
     dty: bool  # no longer used
     _usn: int
     ls: int
-    conf: Dict[str, Any]
     _undo: List[Any]

     def __init__(
@@ -97,6 +77,7 @@ class _Collection:
         self.models = ModelManager(self)
         self.decks = DeckManager(self)
         self.tags = TagManager(self)
+        self.conf = ConfigManager(self)
         self.load()
         if not self.crt:
             d = datetime.datetime.today()
@@ -179,23 +160,21 @@ class _Collection:
             self.dty,  # no longer used
             self._usn,
             self.ls,
-            conf,
             models,
             decks,
         ) = self.db.first(
             """
 select crt, mod, scm, dty, usn, ls,
-conf, models, decks from col"""
+models, decks from col"""
         )
-        self.conf = json.loads(conf)
         self.models.load(models)
         self.decks.load(decks)

     def setMod(self) -> None:
         """Mark DB modified.

-DB operations and the deck/tag/model managers do this automatically, so this
-is only necessary if you modify properties of this object or the conf dict."""
+DB operations and the deck/model managers do this automatically, so this
+is only necessary if you modify properties of this object."""
         self.db.mod = True

     def flush(self, mod: Optional[int] = None) -> None:
@@ -203,14 +182,13 @@ is only necessary if you modify properties of this object or the conf dict."""
         self.mod = intTime(1000) if mod is None else mod
         self.db.execute(
             """update col set
-crt=?, mod=?, scm=?, dty=?, usn=?, ls=?, conf=?""",
+crt=?, mod=?, scm=?, dty=?, usn=?, ls=?""",
             self.crt,
             self.mod,
             self.scm,
             self.dty,
             self._usn,
             self.ls,
-            json.dumps(self.conf),
         )

     def flush_all_changes(self, mod: Optional[int] = None):
@@ -651,6 +629,23 @@ where c.nid = n.id and c.id in %s group by nid"""
     findCards = find_cards
     findNotes = find_notes

+    # Config
+    ##########################################################################
+
+    def get_config(self, key: str, default: Any = None) -> Any:
+        try:
+            return self.conf.get_immutable(key)
+        except KeyError:
+            return default
+
+    def set_config(self, key: str, val: Any):
+        self.setMod()
+        self.conf.set(key, val)
+
+    def remove_config(self, key):
+        self.setMod()
+        self.conf.remove(key)
+
     # Stats
     ##########################################################################

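For reference, a short usage sketch of the col.*_config() API added above (assuming col is an open collection; activeCols is a key the GUI already stores):

    # reading with a default avoids a KeyError for unset keys
    cols = col.get_config("activeCols", ["noteFld", "template"])

    # values are plain JSON-serialisable types; write changes back explicitly
    cols.append("deck")
    col.set_config("activeCols", cols)

    # delete a key entirely
    col.remove_config("activeCols")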
pylib/anki/config.py (new file, 121 lines)
@@ -0,0 +1,121 @@
+# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+"""
+Config handling
+
+- To set a config value, use col.set_config(key, val).
+- To get a config value, use col.get_config(key, default=None). In
+  the case of lists and dictionaries, any changes you make to the returned
+  value will not be saved unless you call set_config().
+- To remove a config value, use col.remove_config(key).
+
+For legacy reasons, the config is also exposed as a dict interface
+as col.conf. To support old code that was mutating inner values,
+using col.conf["key"] needs to wrap lists and dicts when returning them.
+As this is less efficient, please use the col.*_config() API in new code.
+"""
+
+from __future__ import annotations
+
+import copy
+import json
+import weakref
+from typing import Any
+
+import anki
+
+
+class ConfigManager:
+    def __init__(self, col: anki.storage._Collection):
+        self.col = col.weakref()
+
+    def get_immutable(self, key: str) -> Any:
+        s = self.col.backend.get_config_json(key)
+        if not s:
+            raise KeyError
+        return json.loads(s)
+
+    def set(self, key: str, val: Any) -> None:
+        self.col.backend.set_config_json(key, val)
+
+    def remove(self, key: str) -> None:
+        self.col.backend.remove_config(key)
+
+    # Legacy dict interface
+    #########################
+
+    def __getitem__(self, key):
+        val = self.get_immutable(key)
+        if isinstance(val, list):
+            print(
+                f"conf key {key} should be fetched with col.get_config(), and saved with col.set_config()"
+            )
+            return WrappedList(weakref.ref(self), key, val)
+        elif isinstance(val, dict):
+            print(
+                f"conf key {key} should be fetched with col.get_config(), and saved with col.set_config()"
+            )
+            return WrappedDict(weakref.ref(self), key, val)
+        else:
+            return val
+
+    def __setitem__(self, key, value):
+        self.set(key, value)
+
+    def get(self, key, default=None):
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def setdefault(self, key, default):
+        if key not in self:
+            self[key] = default
+        return self[key]
+
+    def __contains__(self, key):
+        try:
+            self.get_immutable(key)
+            return True
+        except KeyError:
+            return False
+
+    def __delitem__(self, key):
+        self.remove(key)
+
+
+# Tracking changes to mutable objects
+#########################################
+# Because we previously allowed mutation of the conf
+# structure directly, to allow col.conf["foo"]["bar"] = xx
+# to continue to function, we apply changes as the object
+# is dropped.
+
+
+class WrappedList(list):
+    def __init__(self, conf, key, val):
+        self.key = key
+        self.conf = conf
+        self.orig = copy.deepcopy(val)
+        super().__init__(val)
+
+    def __del__(self):
+        cur = list(self)
+        conf = self.conf()
+        if conf and self.orig != cur:
+            conf[self.key] = cur
+
+
+class WrappedDict(dict):
+    def __init__(self, conf, key, val):
+        self.key = key
+        self.conf = conf
+        self.orig = copy.deepcopy(val)
+        super().__init__(val)
+
+    def __del__(self):
+        cur = dict(self)
+        conf = self.conf()
+        if conf and self.orig != cur:
+            conf[self.key] = cur
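Put together, the legacy interface and the new API behave like this (a sketch, assuming col is an open collection; curDeck and savedFilters are keys used elsewhere in this commit):

    # scalar values pass straight through
    cur = col.conf["curDeck"]            # backend get_config_json + json.loads

    # lists/dicts come back wrapped, so in-place mutation is still saved on drop
    col.conf["savedFilters"]["hard"] = "is:due prop:ivl<21"

    # preferred, explicit equivalent
    filters = col.get_config("savedFilters", {})
    filters["hard"] = "is:due prop:ivl<21"
    col.set_config("savedFilters", filters)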
@@ -524,8 +524,8 @@ class DeckManager:
     #############################################################

     def active(self) -> Any:
-        "The currrently active dids. Make sure to copy before modifying."
-        return self.col.conf["activeDecks"]
+        "The currrently active dids."
+        return self.col.get_config("activeDecks", [1])

     def selected(self) -> Any:
         "The currently selected did."
@@ -577,6 +577,32 @@ class RustBackend:
             ).get_changed_tags.tags
         )

+    def get_config_json(self, key: str) -> str:
+        return self._run_command(pb.BackendInput(get_config_json=key)).get_config_json
+
+    def set_config_json(self, key: str, val: Any):
+        self._run_command(
+            pb.BackendInput(
+                set_config_json=pb.SetConfigJson(key=key, val=json.dumps(val))
+            )
+        )
+
+    def remove_config(self, key: str):
+        self._run_command(
+            pb.BackendInput(
+                set_config_json=pb.SetConfigJson(key=key, remove=pb.Empty())
+            )
+        )
+
+    def get_all_config(self) -> Dict[str, Any]:
+        jstr = self._run_command(
+            pb.BackendInput(get_all_config=pb.Empty())
+        ).get_all_config
+        return json.loads(jstr)
+
+    def set_all_config(self, conf: Dict[str, Any]):
+        self._run_command(pb.BackendInput(set_all_config=json.dumps(conf)))
+

 def translate_string_in(
     key: TR, **kwargs: Union[str, int, float]
@@ -6,10 +6,9 @@ import json
 import os
 import weakref
 from dataclasses import dataclass
-from typing import Any, Dict, Optional, Tuple
+from typing import Optional

 from anki.collection import _Collection
-from anki.consts import *
 from anki.dbproxy import DBProxy
 from anki.lang import _
 from anki.media import media_paths_from_col_path
@@ -74,26 +73,18 @@ def Collection(


 def initial_db_setup(db: DBProxy) -> None:
-    db.begin()
-    _addColVars(db, *_getColVars(db))
-
-
-def _getColVars(db: DBProxy) -> Tuple[Any, Dict[str, Any]]:
-    import anki.collection
     import anki.decks

+    db.begin()
+
     g = copy.deepcopy(anki.decks.defaultDeck)
     g["id"] = 1
     g["name"] = _("Default")
     g["conf"] = 1
     g["mod"] = intTime()
-    return g, anki.collection.defaultConf.copy()
-
-
-def _addColVars(db: DBProxy, g: Dict[str, Any], c: Dict[str, Any]) -> None:
     db.execute(
         """
-update col set conf = ?, decks = ?""",
-        json.dumps(c),
+update col set decks = ?""",
         json.dumps({"1": g}),
     )
@@ -449,11 +449,11 @@ from notes where %s"""
     # Col config
     ##########################################################################

-    def getConf(self) -> Any:
-        return self.col.conf
+    def getConf(self) -> Dict[str, Any]:
+        return self.col.backend.get_all_config()

-    def mergeConf(self, conf) -> None:
-        self.col.conf = conf
+    def mergeConf(self, conf: Dict[str, Any]) -> None:
+        self.col.backend.set_all_config(conf)

     # HTTP syncing tools
@@ -80,7 +80,7 @@ class DataModel(QAbstractTableModel):
         self.browser = browser
         self.col = browser.col
         self.sortKey = None
-        self.activeCols = self.col.conf.get(
+        self.activeCols = self.col.get_config(
             "activeCols", ["noteFld", "template", "cardDue", "deck"]
         )
         self.cards: Sequence[int] = []
@@ -1121,7 +1121,7 @@ QTableView {{ gridline-color: {grid} }}

     def _favTree(self, root) -> None:
         assert self.col
-        saved = self.col.conf.get("savedFilters", {})
+        saved = self.col.get_config("savedFilters", {})
         for name, filt in sorted(saved.items()):
             item = SidebarItem(
                 name,
@@ -1360,7 +1360,7 @@ QTableView {{ gridline-color: {grid} }}
         ml = MenuList()
         # make sure exists
         if "savedFilters" not in self.col.conf:
-            self.col.conf["savedFilters"] = {}
+            self.col.set_config("savedFilters", {})

         ml.addSeparator()

@@ -1369,7 +1369,7 @@ QTableView {{ gridline-color: {grid} }}
         else:
             ml.addItem(_("Save Current Filter..."), self._onSaveFilter)

-        saved = self.col.conf["savedFilters"]
+        saved = self.col.get_config("savedFilters")
         if not saved:
             return ml

@@ -1384,8 +1384,9 @@ QTableView {{ gridline-color: {grid} }}
         if not name:
             return
         filt = self.form.searchEdit.lineEdit().text()
-        self.col.conf["savedFilters"][name] = filt
-        self.col.setMod()
+        conf = self.col.get_config("savedFilters")
+        conf[name] = filt
+        self.col.save_config("savedFilters", conf)
         self.maybeRefreshSidebar()

     def _onRemoveFilter(self):
@@ -1399,7 +1400,7 @@ QTableView {{ gridline-color: {grid} }}
     # returns name if found
     def _currentFilterIsSaved(self):
         filt = self.form.searchEdit.lineEdit().text()
-        for k, v in self.col.conf["savedFilters"].items():
+        for k, v in self.col.get_config("savedFilters").items():
             if filt == v:
                 return k
         return None
@@ -2,7 +2,6 @@
 # -*- coding: utf-8 -*-
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 from operator import itemgetter
-
 from typing import Union

 import aqt
@@ -2,7 +2,6 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::backend::dbproxy::db_command_bytes;
-use crate::backend_proto::backend_input::Value;
 use crate::backend_proto::{
     AddOrUpdateDeckConfigIn, BuiltinSortKind, Empty, RenderedTemplateReplacement, SyncMediaIn,
 };
@@ -34,7 +33,9 @@ use crate::{backend_proto as pb, log};
 use fluent::FluentValue;
 use futures::future::{AbortHandle, Abortable};
 use log::error;
+use pb::backend_input::Value;
 use prost::Message;
+use serde_json::Value as JsonValue;
 use std::collections::{HashMap, HashSet};
 use std::convert::TryFrom;
 use std::path::PathBuf;
@@ -291,6 +292,17 @@ impl Backend {
             Value::AllTags(_) => OValue::AllTags(self.all_tags()?),
             Value::RegisterTags(input) => OValue::RegisterTags(self.register_tags(input)?),
             Value::GetChangedTags(usn) => OValue::GetChangedTags(self.get_changed_tags(usn)?),
+            Value::GetConfigJson(key) => OValue::GetConfigJson(self.get_config_json(&key)?),
+            Value::SetConfigJson(input) => OValue::SetConfigJson({
+                self.set_config_json(input)?;
+                pb::Empty {}
+            }),
+
+            Value::SetAllConfig(input) => OValue::SetConfigJson({
+                self.set_all_config(&input)?;
+                pb::Empty {}
+            }),
+            Value::GetAllConfig(_) => OValue::GetAllConfig(self.get_all_config()?),
         })
     }

@@ -400,8 +412,8 @@ impl Backend {

     fn sched_timing_today(&self, input: pb::SchedTimingTodayIn) -> pb::SchedTimingTodayOut {
         let today = sched_timing_today(
-            input.created_secs as i64,
-            input.now_secs as i64,
+            TimestampSecs(input.created_secs),
+            TimestampSecs(input.now_secs),
             input.created_mins_west.map(|v| v.val),
             input.now_mins_west.map(|v| v.val),
             input.rollover_hour.map(|v| v.val as i8),
@@ -783,6 +795,52 @@ impl Backend {
             })
         })
     }
+
+    fn get_config_json(&self, key: &str) -> Result<String> {
+        self.with_col(|col| {
+            let val: Option<JsonValue> = col.get_config_optional(key);
+            match val {
+                None => Ok("".to_string()),
+                Some(val) => Ok(serde_json::to_string(&val)?),
+            }
+        })
+    }
+
+    fn set_config_json(&self, input: pb::SetConfigJson) -> Result<()> {
+        self.with_col(|col| {
+            col.transact(None, |col| {
+                if let Some(op) = input.op {
+                    match op {
+                        pb::set_config_json::Op::Val(val) => {
+                            // ensure it's a well-formed object
+                            let val: JsonValue = serde_json::from_str(&val)?;
+                            col.set_config(&input.key, &val)
+                        }
+                        pb::set_config_json::Op::Remove(_) => col.remove_config(&input.key),
+                    }
+                } else {
+                    Err(AnkiError::invalid_input("no op received"))
+                }
+            })
+        })
+    }
+
+    fn set_all_config(&self, conf: &str) -> Result<()> {
+        let val: HashMap<String, JsonValue> = serde_json::from_str(conf)?;
+        self.with_col(|col| {
+            col.transact(None, |col| {
+                col.storage
+                    .set_all_config(val, col.usn()?, TimestampSecs::now())
+            })
+        })
+    }
+
+    fn get_all_config(&self) -> Result<String> {
+        self.with_col(|col| {
+            let conf = col.storage.get_all_config()?;
+            serde_json::to_string(&conf).map_err(Into::into)
+        })
+    }
 }

 fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue {
@@ -6,7 +6,11 @@ use crate::i18n::I18n;
 use crate::log::Logger;
 use crate::timestamp::TimestampSecs;
 use crate::types::Usn;
-use crate::{sched::cutoff::SchedTimingToday, storage::SqliteStorage, undo::UndoManager};
+use crate::{
+    sched::cutoff::{sched_timing_today, SchedTimingToday},
+    storage::SqliteStorage,
+    undo::UndoManager,
+};
 use std::path::PathBuf;

 pub fn open_collection<P: Into<PathBuf>>(
@@ -141,8 +145,24 @@ impl Collection {
                 return Ok(*timing);
             }
         }
-        self.state.timing_today = Some(self.storage.timing_today(self.server)?);
-        Ok(self.state.timing_today.clone().unwrap())
+
+        let local_offset = if self.server {
+            self.get_local_mins_west()
+        } else {
+            None
+        };
+
+        let timing = sched_timing_today(
+            self.storage.creation_stamp()?,
+            TimestampSecs::now(),
+            self.get_creation_mins_west(),
+            local_offset,
+            self.get_rollover(),
+        );
+
+        self.state.timing_today = Some(timing);
+
+        Ok(timing)
     }

     pub(crate) fn usn(&self) -> Result<Usn> {
@@ -1,26 +1,111 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use crate::collection::Collection;
 use crate::decks::DeckID;
-use crate::serde::default_on_invalid;
-use serde_aux::field_attributes::deserialize_number_from_string;
+use crate::err::Result;
+use crate::timestamp::TimestampSecs;
+use serde::{de::DeserializeOwned, Serialize};
 use serde_derive::Deserialize;
+use serde_json::json;

-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Config {
-    #[serde(
-        rename = "curDeck",
-        deserialize_with = "deserialize_number_from_string"
-    )]
-    pub(crate) current_deck_id: DeckID,
-    pub(crate) rollover: Option<i8>,
-    pub(crate) creation_offset: Option<i32>,
-    pub(crate) local_offset: Option<i32>,
-    #[serde(rename = "sortType", deserialize_with = "default_on_invalid")]
-    pub(crate) browser_sort_kind: SortKind,
-    #[serde(rename = "sortBackwards", deserialize_with = "default_on_invalid")]
-    pub(crate) browser_sort_reverse: bool,
-}
+pub(crate) fn schema11_config_as_string() -> String {
+    let obj = json!({
+        "activeDecks": [1],
+        "curDeck": 1,
+        "newSpread": 0,
+        "collapseTime": 1200,
+        "timeLim": 0,
+        "estTimes": true,
+        "dueCounts": true,
+        "curModel": null,
+        "nextPos": 1,
+        "sortType": "noteFld",
+        "sortBackwards": false,
+        "addToCur": true,
+        "dayLearnFirst": false,
+        "schedVer": 1,
+    });
+    serde_json::to_string(&obj).unwrap()
+}
+
+pub(crate) enum ConfigKey {
+    BrowserSortKind,
+    BrowserSortReverse,
+    CurrentDeckID,
+    CreationOffset,
+    Rollover,
+    LocalOffset,
+}
+
+impl From<ConfigKey> for &'static str {
+    fn from(c: ConfigKey) -> Self {
+        match c {
+            ConfigKey::BrowserSortKind => "sortType",
+            ConfigKey::BrowserSortReverse => "sortBackwards",
+            ConfigKey::CurrentDeckID => "curDeck",
+            ConfigKey::CreationOffset => "creationOffset",
+            ConfigKey::Rollover => "rollover",
+            ConfigKey::LocalOffset => "localOffset",
+        }
+    }
+}
+
+impl Collection {
+    /// Get config item, returning None if missing/invalid.
+    pub(crate) fn get_config_optional<'a, T, K>(&self, key: K) -> Option<T>
+    where
+        T: DeserializeOwned,
+        K: Into<&'a str>,
+    {
+        match self.storage.get_config_value(key.into()) {
+            Ok(Some(val)) => val,
+            _ => None,
+        }
+    }
+
+    // /// Get config item, returning default value if missing/invalid.
+    pub(crate) fn get_config_default<T, K>(&self, key: K) -> T
+    where
+        T: DeserializeOwned + Default,
+        K: Into<&'static str>,
+    {
+        self.get_config_optional(key).unwrap_or_default()
+    }
+
+    pub(crate) fn set_config<T: Serialize>(&self, key: &str, val: &T) -> Result<()> {
+        self.storage
+            .set_config_value(key, val, self.usn()?, TimestampSecs::now())
+    }
+
+    pub(crate) fn remove_config(&self, key: &str) -> Result<()> {
+        self.storage.remove_config(key)
+    }
+
+    pub(crate) fn get_browser_sort_kind(&self) -> SortKind {
+        self.get_config_default(ConfigKey::BrowserSortKind)
+    }
+
+    pub(crate) fn get_browser_sort_reverse(&self) -> bool {
+        self.get_config_default(ConfigKey::BrowserSortReverse)
+    }
+
+    pub(crate) fn get_current_deck_id(&self) -> DeckID {
+        self.get_config_optional(ConfigKey::CurrentDeckID)
+            .unwrap_or(DeckID(1))
+    }
+
+    pub(crate) fn get_creation_mins_west(&self) -> Option<i32> {
+        self.get_config_optional(ConfigKey::CreationOffset)
+    }
+
+    pub(crate) fn get_local_mins_west(&self) -> Option<i32> {
+        self.get_config_optional(ConfigKey::LocalOffset)
+    }
+
+    pub(crate) fn get_rollover(&self) -> Option<i8> {
+        self.get_config_optional(ConfigKey::Rollover)
+    }
+}

 #[derive(Deserialize, PartialEq, Debug)]
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use crate::timestamp::TimestampSecs;
 use chrono::{Date, Duration, FixedOffset, Local, TimeZone};

 #[derive(Debug, PartialEq, Clone, Copy)]
@@ -138,25 +139,25 @@ fn sched_timing_today_v2_legacy(

 /// Based on provided input, get timing info from the relevant function.
 pub(crate) fn sched_timing_today(
-    created_secs: i64,
-    now_secs: i64,
+    created_secs: TimestampSecs,
+    now_secs: TimestampSecs,
     created_mins_west: Option<i32>,
     now_mins_west: Option<i32>,
     rollover_hour: Option<i8>,
 ) -> SchedTimingToday {
-    let now_west = now_mins_west.unwrap_or_else(|| local_minutes_west_for_stamp(now_secs));
+    let now_west = now_mins_west.unwrap_or_else(|| local_minutes_west_for_stamp(now_secs.0));
     match (rollover_hour, created_mins_west) {
         (None, _) => {
             // if rollover unset, v1 scheduler
-            sched_timing_today_v1(created_secs, now_secs)
+            sched_timing_today_v1(created_secs.0, now_secs.0)
         }
         (Some(roll), None) => {
             // if creationOffset unset, v2 scheduler with legacy cutoff handling
-            sched_timing_today_v2_legacy(created_secs, roll, now_secs, now_west)
+            sched_timing_today_v2_legacy(created_secs.0, roll, now_secs.0, now_west)
         }
         (Some(roll), Some(crt_west)) => {
             // v2 scheduler, new cutoff handling
-            sched_timing_today_v2_new(created_secs, crt_west, now_secs, now_west, roll)
+            sched_timing_today_v2_new(created_secs.0, crt_west, now_secs.0, now_west, roll)
         }
     }
 }
@@ -33,10 +33,10 @@ pub(crate) fn search_cards<'a, 'b>(
     match order {
         SortMode::NoOrder => (),
         SortMode::FromConfig => {
-            let conf = req.storage.all_config()?;
-            prepare_sort(req, &conf.browser_sort_kind)?;
+            let kind = req.get_browser_sort_kind();
+            prepare_sort(req, &kind)?;
             sql.push_str(" order by ");
-            write_order(&mut sql, &conf.browser_sort_kind, conf.browser_sort_reverse)?;
+            write_order(&mut sql, &kind, req.get_browser_sort_reverse())?;
         }
         SortMode::Builtin { kind, reverse } => {
             prepare_sort(req, &kind)?;
@@ -231,9 +231,9 @@ impl SqlWriter<'_> {
             .map(|(_, v)| v)
             .collect();
         let dids_with_children = if deck == "current" {
-            let config = self.col.storage.all_config()?;
-            let mut dids_with_children = vec![config.current_deck_id];
-            let current = get_deck(&all_decks, config.current_deck_id)
+            let current_id = self.col.get_current_deck_id();
+            let mut dids_with_children = vec![current_id];
+            let current = get_deck(&all_decks, current_id)
                 .ok_or_else(|| AnkiError::invalid_input("invalid current deck"))?;
             for child_did in child_ids(&all_decks, &current.name) {
                 dids_with_children.push(child_did);
rslib/src/storage/config/add.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+insert
+  or replace into config (key, usn, mtime_secs, val)
+values
+  (?, ?, ?, ?)
rslib/src/storage/config/get.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
+select
+  val
+from config
+where
+  key = ?
rslib/src/storage/config/mod.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use super::SqliteStorage;
+use crate::{err::Result, timestamp::TimestampSecs, types::Usn};
+use rusqlite::{params, NO_PARAMS};
+use serde::{de::DeserializeOwned, Serialize};
+use serde_json::Value;
+use std::collections::HashMap;
+
+impl SqliteStorage {
+    pub(crate) fn set_config_value<T: Serialize>(
+        &self,
+        key: &str,
+        val: &T,
+        usn: Usn,
+        mtime: TimestampSecs,
+    ) -> Result<()> {
+        let json = serde_json::to_string(val)?;
+        self.db
+            .prepare_cached(include_str!("add.sql"))?
+            .execute(params![key, usn, mtime, json])?;
+        Ok(())
+    }
+
+    pub(crate) fn remove_config(&self, key: &str) -> Result<()> {
+        self.db
+            .prepare_cached("delete from config where key=?")?
+            .execute(&[key])?;
+        Ok(())
+    }
+
+    pub(crate) fn get_config_value<T: DeserializeOwned>(&self, key: &str) -> Result<Option<T>> {
+        self.db
+            .prepare_cached(include_str!("get.sql"))?
+            .query_and_then(&[key], |row| {
+                serde_json::from_str(row.get_raw(0).as_str()?).map_err(Into::into)
+            })?
+            .next()
+            .transpose()
+    }
+
+    pub(crate) fn get_all_config(&self) -> Result<HashMap<String, Value>> {
+        self.db
+            .prepare("select key, val from config")?
+            .query_and_then(NO_PARAMS, |row| {
+                let val: Value = serde_json::from_str(row.get_raw(1).as_str()?)?;
+                Ok((row.get::<usize, String>(0)?, val))
+            })?
+            .collect()
+    }
+
+    pub(crate) fn set_all_config(
+        &self,
+        conf: HashMap<String, Value>,
+        usn: Usn,
+        mtime: TimestampSecs,
+    ) -> Result<()> {
+        self.db.execute("delete from config", NO_PARAMS)?;
+        for (key, val) in conf.iter() {
+            self.set_config_value(key, val, usn, mtime)?;
+        }
+        Ok(())
+    }
+
+    // Upgrading/downgrading
+
+    pub(super) fn upgrade_config_to_schema14(&self) -> Result<()> {
+        let conf = self
+            .db
+            .query_row_and_then("select conf from col", NO_PARAMS, |row| {
+                let conf: Result<HashMap<String, Value>> =
+                    serde_json::from_str(row.get_raw(0).as_str()?).map_err(Into::into);
+                conf
+            })?;
+        self.set_all_config(conf, Usn(0), TimestampSecs(0))?;
+        self.db.execute_batch("update col set conf=''")?;
+
+        Ok(())
+    }
+
+    pub(super) fn downgrade_config_from_schema14(&self) -> Result<()> {
+        let allconf = self.get_all_config()?;
+        self.db
+            .execute("update col set conf=?", &[serde_json::to_string(&allconf)?])?;
+        Ok(())
+    }
+}
@@ -2,6 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 mod card;
+mod config;
 mod deckconf;
 mod sqlite;
 mod tag;
@@ -1,20 +1,13 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::config::Config;
+use crate::config::schema11_config_as_string;
 use crate::decks::DeckID;
 use crate::err::Result;
 use crate::err::{AnkiError, DBErrorKind};
 use crate::notetypes::NoteTypeID;
 use crate::timestamp::{TimestampMillis, TimestampSecs};
-use crate::{
-    decks::Deck,
-    i18n::I18n,
-    notetypes::NoteType,
-    sched::cutoff::{sched_timing_today, SchedTimingToday},
-    text::without_combining,
-    types::Usn,
-};
+use crate::{decks::Deck, i18n::I18n, notetypes::NoteType, text::without_combining, types::Usn};
 use regex::Regex;
 use rusqlite::{functions::FunctionFlags, params, Connection, NO_PARAMS};
 use std::cmp::Ordering;
@@ -23,7 +16,7 @@ use unicase::UniCase;

 const SCHEMA_MIN_VERSION: u8 = 11;
 const SCHEMA_STARTING_VERSION: u8 = 11;
-const SCHEMA_MAX_VERSION: u8 = 13;
+const SCHEMA_MAX_VERSION: u8 = 14;

 fn unicase_compare(s1: &str, s2: &str) -> Ordering {
     UniCase::new(s1).cmp(&UniCase::new(s2))
@@ -182,8 +175,12 @@ impl SqliteStorage {
         db.execute_batch(include_str!("schema11.sql"))?;
         // start at schema 11, then upgrade below
         db.execute(
-            "update col set crt=?, ver=?",
-            params![TimestampSecs::now(), SCHEMA_STARTING_VERSION],
+            "update col set crt=?, ver=?, conf=?",
+            params![
+                TimestampSecs::now(),
+                SCHEMA_STARTING_VERSION,
+                &schema11_config_as_string()
+            ],
         )?;
     }

@@ -290,13 +287,6 @@ impl SqliteStorage {
             })
     }

-    pub(crate) fn all_config(&self) -> Result<Config> {
-        self.db
-            .query_row_and_then("select conf from col", NO_PARAMS, |row| -> Result<_> {
-                Ok(serde_json::from_str(row.get_raw(0).as_str()?)?)
-            })
-    }
-
     pub(crate) fn all_note_types(&self) -> Result<HashMap<NoteTypeID, NoteType>> {
         let mut stmt = self.db.prepare("select models from col")?;
         let note_types = stmt
@@ -313,21 +303,11 @@ impl SqliteStorage {
         Ok(note_types)
     }

-    pub(crate) fn timing_today(&self, server: bool) -> Result<SchedTimingToday> {
-        let crt: i64 = self
-            .db
+    pub(crate) fn creation_stamp(&self) -> Result<TimestampSecs> {
+        self.db
             .prepare_cached("select crt from col")?
-            .query_row(NO_PARAMS, |row| row.get(0))?;
-        let conf = self.all_config()?;
-        let now_offset = if server { conf.local_offset } else { None };
-
-        Ok(sched_timing_today(
-            crt,
-            TimestampSecs::now().0,
-            conf.creation_offset,
-            now_offset,
-            conf.rollover,
-        ))
+            .query_row(NO_PARAMS, |row| row.get(0))
+            .map_err(Into::into)
     }

     pub(crate) fn schema_modified(&self) -> Result<bool> {
@@ -58,7 +58,7 @@ impl SqliteStorage {

     // Upgrading/downgrading

-    pub(super) fn upgrade_tags_to_schema12(&self) -> Result<()> {
+    pub(super) fn upgrade_tags_to_schema13(&self) -> Result<()> {
         let tags = self
             .db
             .query_row_and_then("select tags from col", NO_PARAMS, |row| {
@@ -14,14 +14,24 @@ impl SqliteStorage {
         if ver < 13 {
             self.db
                 .execute_batch(include_str!("schema13_upgrade.sql"))?;
-            self.upgrade_tags_to_schema12()?;
+            self.upgrade_tags_to_schema13()?;
         }
+        if ver < 14 {
+            self.db
+                .execute_batch(include_str!("schema14_upgrade.sql"))?;
+            self.upgrade_config_to_schema14()?;
+        }

         Ok(())
     }

     pub(super) fn downgrade_to_schema_11(&self) -> Result<()> {
         self.begin_trx()?;

+        self.downgrade_config_from_schema14()?;
+        self.db
+            .execute_batch(include_str!("schema14_downgrade.sql"))?;
+
         self.downgrade_tags_from_schema13()?;
         self.db
             .execute_batch(include_str!("schema13_downgrade.sql"))?;
rslib/src/storage/upgrades/schema14_downgrade.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+drop table config;
+update col
+set
+  ver = 13;
rslib/src/storage/upgrades/schema14_upgrade.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
+create table config (
+  key text not null primary key,
+  usn integer not null,
+  mtime_secs integer not null,
+  val text not null
+) without rowid;
+update col
+set
+  ver = 14;
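To make the new layout concrete, a standalone sketch using plain sqlite3 (not Anki's code) of how a value is stored in the schema 14 config table; the per-key usn and mtime_secs columns are what will allow key-level merging of config changes during sync:

    import json
    import sqlite3
    import time

    db = sqlite3.connect(":memory:")
    db.execute(
        """create table config (
      key text not null primary key,
      usn integer not null,
      mtime_secs integer not null,
      val text not null
    ) without rowid"""
    )
    # mirrors rslib/src/storage/config/add.sql
    db.execute(
        "insert or replace into config (key, usn, mtime_secs, val) values (?, ?, ?, ?)",
        ("curDeck", 0, int(time.time()), json.dumps(1)),
    )
    # mirrors get.sql: the value comes back as JSON text
    print(db.execute("select val from config where key = ?", ("curDeck",)).fetchone())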