Mirror of https://github.com/ankitects/anki.git, synced 2025-12-17 08:40:57 -05:00
PEP8 pylib (#1443)
* PEP8 scheduler/base.py
* PEP8 _backend/__init__.py
* PEP8 _backend/genbackend.py
* PEP8 _backend/genfluent.py
* PEP8 scheduler/__init__.py
* PEP8 __init__.py
* PEP8 _legacy.py
* PEP8 syncserver/__init__.py
  - Make 'ip' a good name
  - Overrule `global col` being identified as a constant
* PEP8 syncserver/__main__.py
* PEP8 buildinfo.py
* Implement `DeprecatedNamesMixin` for modules
* PEP8 browser.py
* PEP8 config.py
* PEP8 consts.py
* PEP8 db.py
* Format
* Improve AttributeError for DeprecatedNamesMixin
* print the line that imported/referenced the legacy module attr (dae)
* DeprecatedNamesMixinStandalone -> ...ForModule
This commit is contained in:
parent
61f3b71664
commit
d665dbc9a7
28 changed files with 231 additions and 146 deletions
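The headline change is the new `DeprecatedNamesMixinForModule`, which lets a module keep answering for its old camelCase names while steering callers to the snake_case replacements. A minimal sketch of the pattern this commit introduces, written as a hypothetical module (the module and function names below are illustrative, not taken from the diff):

```
# Hypothetical module mirroring the shim this commit adds to anki/consts.py.
# Assumes the anki package (and thus anki._legacy) is importable.
from __future__ import annotations

from typing import Any, no_type_check

from anki._legacy import DeprecatedNamesMixinForModule


def new_card_order_labels() -> dict[int, str]:
    return {0: "random", 1: "due"}


# The shim captures this module's globals so it can look up renamed attributes.
_deprecated_names = DeprecatedNamesMixinForModule(globals())


@no_type_check
def __getattr__(name: str) -> Any:
    # Only called for names not found normally (PEP 562). The shim remaps
    # e.g. `newCardOrderLabels` to `new_card_order_labels`, prints a warning
    # naming the caller's file and line, and returns the new attribute.
    return _deprecated_names.__getattr__(name)
```

Importers that still reference the old camelCase name keep working, but get a one-line nudge towards the new spelling.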
|
|
@ -57,3 +57,4 @@ good-names =
|
||||||
tr,
|
tr,
|
||||||
db,
|
db,
|
||||||
ok,
|
ok,
|
||||||
|
ip,
|
||||||
|
|
@ -1,2 +1,4 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
@ -95,8 +97,8 @@ class RustBackend(RustBackendGenerated):
|
||||||
bytes_input = to_json_bytes(input)
|
bytes_input = to_json_bytes(input)
|
||||||
try:
|
try:
|
||||||
return from_json_bytes(self._backend.db_command(bytes_input))
|
return from_json_bytes(self._backend.db_command(bytes_input))
|
||||||
except Exception as e:
|
except Exception as error:
|
||||||
err_bytes = e.args[0]
|
err_bytes = error.args[0]
|
||||||
err = backend_pb2.BackendError()
|
err = backend_pb2.BackendError()
|
||||||
err.ParseFromString(err_bytes)
|
err.ParseFromString(err_bytes)
|
||||||
raise backend_exception_to_pylib(err)
|
raise backend_exception_to_pylib(err)
|
||||||
|
|
@ -125,8 +127,8 @@ class RustBackend(RustBackendGenerated):
|
||||||
input_bytes = input.SerializeToString()
|
input_bytes = input.SerializeToString()
|
||||||
try:
|
try:
|
||||||
return self._backend.command(service, method, input_bytes)
|
return self._backend.command(service, method, input_bytes)
|
||||||
except Exception as e:
|
except Exception as error:
|
||||||
err_bytes = bytes(e.args[0])
|
err_bytes = bytes(error.args[0])
|
||||||
err = backend_pb2.BackendError()
|
err = backend_pb2.BackendError()
|
||||||
err.ParseFromString(err_bytes)
|
err.ParseFromString(err_bytes)
|
||||||
raise backend_exception_to_pylib(err)
|
raise backend_exception_to_pylib(err)
|
||||||
|
|
@ -135,12 +137,12 @@ class RustBackend(RustBackendGenerated):
|
||||||
def translate_string_in(
|
def translate_string_in(
|
||||||
module_index: int, message_index: int, **kwargs: str | int | float
|
module_index: int, message_index: int, **kwargs: str | int | float
|
||||||
) -> i18n_pb2.TranslateStringRequest:
|
) -> i18n_pb2.TranslateStringRequest:
|
||||||
args = {}
|
args = {
|
||||||
for (k, v) in kwargs.items():
|
k: i18n_pb2.TranslateArgValue(str=v)
|
||||||
if isinstance(v, str):
|
if isinstance(v, str)
|
||||||
args[k] = i18n_pb2.TranslateArgValue(str=v)
|
else i18n_pb2.TranslateArgValue(number=v)
|
||||||
else:
|
for k, v in kwargs.items()
|
||||||
args[k] = i18n_pb2.TranslateArgValue(number=v)
|
}
|
||||||
return i18n_pb2.TranslateStringRequest(
|
return i18n_pb2.TranslateStringRequest(
|
||||||
module_index=module_index, message_index=message_index, args=args
|
module_index=module_index, message_index=message_index, args=args
|
||||||
)
|
)
|
||||||
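In `translate_string_in` above, the loop that filled `args` entry by entry becomes a single dict comprehension with a conditional expression. A standalone sketch of the same transformation, with plain tuples standing in for the protobuf `TranslateArgValue` wrapper (that substitution is mine):

```
# Simplified illustration only; the real code wraps values in
# i18n_pb2.TranslateArgValue(str=...) / TranslateArgValue(number=...).
kwargs = {"count": 3, "name": "deck"}

# Before: explicit loop
args_loop = {}
for k, v in kwargs.items():
    if isinstance(v, str):
        args_loop[k] = ("str", v)
    else:
        args_loop[k] = ("number", v)

# After: one comprehension, same result
args_comp = {
    k: ("str", v) if isinstance(v, str) else ("number", v)
    for k, v in kwargs.items()
}

assert args_loop == args_comp
```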
|
|
|
||||||
|
|
@ -2,6 +2,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from typing import List, Literal, TypedDict
|
from typing import List, Literal, TypedDict
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
|
|
@ -33,8 +35,8 @@ def print_deprecation_warning(msg: str, frame: int = 2) -> None:
|
||||||
print(f"{path}:{linenum}:{msg}")
|
print(f"{path}:{linenum}:{msg}")
|
||||||
|
|
||||||
|
|
||||||
def _print_warning(old: str, doc: str) -> None:
|
def _print_warning(old: str, doc: str, frame: int = 1) -> None:
|
||||||
return print_deprecation_warning(f"{old} is deprecated: {doc}", frame=1)
|
return print_deprecation_warning(f"{old} is deprecated: {doc}", frame=frame)
|
||||||
|
|
||||||
|
|
||||||
class DeprecatedNamesMixin:
|
class DeprecatedNamesMixin:
|
||||||
|
|
@ -50,19 +52,26 @@ class DeprecatedNamesMixin:
|
||||||
|
|
||||||
@no_type_check
|
@no_type_check
|
||||||
def __getattr__(self, name: str) -> Any:
|
def __getattr__(self, name: str) -> Any:
|
||||||
|
try:
|
||||||
|
remapped, replacement = self._get_remapped_and_replacement(name)
|
||||||
|
out = getattr(self, remapped)
|
||||||
|
except AttributeError:
|
||||||
|
raise AttributeError(
|
||||||
|
f"'{self.__class__.__name__}' object has no attribute '{name}'"
|
||||||
|
) from None
|
||||||
|
|
||||||
|
_print_warning(f"'{name}'", f"please use '{replacement}'")
|
||||||
|
return out
|
||||||
|
|
||||||
|
@no_type_check
|
||||||
|
def _get_remapped_and_replacement(self, name: str) -> tuple[str, str]:
|
||||||
if some_tuple := self._deprecated_attributes.get(name):
|
if some_tuple := self._deprecated_attributes.get(name):
|
||||||
remapped, replacement = some_tuple
|
return some_tuple
|
||||||
else:
|
|
||||||
replacement = remapped = self._deprecated_aliases.get(
|
remapped = self._deprecated_aliases.get(name) or stringcase.snakecase(name)
|
||||||
name
|
|
||||||
) or stringcase.snakecase(name)
|
|
||||||
if remapped == name:
|
if remapped == name:
|
||||||
raise AttributeError
|
raise AttributeError
|
||||||
|
return (remapped, remapped)
|
||||||
out = getattr(self, remapped)
|
|
||||||
_print_warning(f"'{name}'", f"please use '{replacement}'")
|
|
||||||
|
|
||||||
return out
|
|
||||||
|
|
||||||
@no_type_check
|
@no_type_check
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|
@ -98,6 +107,37 @@ class DeprecatedNamesMixin:
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class DeprecatedNamesMixinForModule(DeprecatedNamesMixin):
|
||||||
|
"""Provides the functionality of DeprecatedNamesMixin for modules.
|
||||||
|
|
||||||
|
It can be invoked like this:
|
||||||
|
```
|
||||||
|
_deprecated_names = DeprecatedNamesMixinForModule(globals())
|
||||||
|
_deprecated_names.register_deprecated_aliases(...
|
||||||
|
_deprecated_names.register_deprecated_attributes(...
|
||||||
|
|
||||||
|
@no_type_check
|
||||||
|
def __getattr__(name: str) -> Any:
|
||||||
|
return _deprecated_names.__getattr__(name)
|
||||||
|
```
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, module_globals: dict[str, Any]) -> None:
|
||||||
|
self.module_globals = module_globals
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> Any:
|
||||||
|
try:
|
||||||
|
remapped, replacement = self._get_remapped_and_replacement(name)
|
||||||
|
out = self.module_globals[remapped]
|
||||||
|
except (AttributeError, KeyError):
|
||||||
|
raise AttributeError(
|
||||||
|
f"Module '{self.module_globals['__name__']}' has no attribute '{name}'"
|
||||||
|
) from None
|
||||||
|
|
||||||
|
_print_warning(f"'{name}'", f"please use '{replacement}'", frame=0)
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
def deprecated(replaced_by: Callable | None = None, info: str = "") -> Callable:
|
def deprecated(replaced_by: Callable | None = None, info: str = "") -> Callable:
|
||||||
"""Print a deprecation warning, telling users to use `replaced_by`, or show `doc`."""
|
"""Print a deprecation warning, telling users to use `replaced_by`, or show `doc`."""
|
||||||
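Pulling the lookup into `_get_remapped_and_replacement` makes the resolution order explicit: an explicitly registered attribute returns its (remapped, replacement) pair, otherwise a registered alias or the automatic snake_case conversion is used and the remapped name doubles as the suggestion. A rough standalone re-implementation for illustration — the helper and the alias entry are mine; only the `sortCards` entry mirrors a real registration made further down in scheduler/base.py:

```
# Standalone sketch of the mixin's lookup order; not the class itself.
from __future__ import annotations

import stringcase  # same dependency the mixin uses for the camelCase fallback

_deprecated_attributes = {
    # old name -> (internal target, name to recommend); matches the
    # SchedulerBase.register_deprecated_attributes(sortCards=...) call in this PR.
    "sortCards": ("_legacy_sort_cards", "reposition_new_cards"),
}
_deprecated_aliases = {
    "oldName": "new_name",  # made-up alias, purely illustrative
}


def resolve(name: str) -> tuple[str, str]:
    if pair := _deprecated_attributes.get(name):
        return pair
    remapped = _deprecated_aliases.get(name) or stringcase.snakecase(name)
    if remapped == name:
        raise AttributeError(name)  # nothing to remap to
    return (remapped, remapped)


print(resolve("sortCards"))   # ('_legacy_sort_cards', 'reposition_new_cards')
print(resolve("dayCutoff"))   # ('day_cutoff', 'day_cutoff') via the snakecase fallback
```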
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
|
|
||||||
class BrowserConfig:
|
class BrowserConfig:
|
||||||
ACTIVE_CARD_COLUMNS_KEY = "activeCols"
|
ACTIVE_CARD_COLUMNS_KEY = "activeCols"
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from importlib.resources import open_text
|
from importlib.resources import open_text
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -10,8 +12,8 @@ def _get_build_info() -> dict[str, str]:
|
||||||
for line in file.readlines():
|
for line in file.readlines():
|
||||||
elems = line.split()
|
elems = line.split()
|
||||||
if len(elems) == 2:
|
if len(elems) == 2:
|
||||||
k, v = elems
|
key, val = elems
|
||||||
info[k] = v
|
info[key] = val
|
||||||
|
|
||||||
return info
|
return info
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Config handling
|
Config handling
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,14 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
from typing import Any, NewType
|
from typing import Any, NewType, no_type_check
|
||||||
|
|
||||||
|
from anki._legacy import DeprecatedNamesMixinForModule
|
||||||
|
|
||||||
# whether new cards should be mixed with reviews, or shown first or last
|
# whether new cards should be mixed with reviews, or shown first or last
|
||||||
NEW_CARDS_DISTRIBUTE = 0
|
NEW_CARDS_DISTRIBUTE = 0
|
||||||
|
|
@ -107,7 +111,7 @@ def _tr(col: anki.collection.Collection | None) -> Any:
|
||||||
return tr_legacyglobal
|
return tr_legacyglobal
|
||||||
|
|
||||||
|
|
||||||
def newCardOrderLabels(col: anki.collection.Collection | None) -> dict[int, Any]:
|
def new_card_order_labels(col: anki.collection.Collection | None) -> dict[int, Any]:
|
||||||
tr = _tr(col)
|
tr = _tr(col)
|
||||||
return {
|
return {
|
||||||
0: tr.scheduling_show_new_cards_in_random_order(),
|
0: tr.scheduling_show_new_cards_in_random_order(),
|
||||||
|
|
@ -115,7 +119,7 @@ def newCardOrderLabels(col: anki.collection.Collection | None) -> dict[int, Any]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def newCardSchedulingLabels(
|
def new_card_scheduling_labels(
|
||||||
col: anki.collection.Collection | None,
|
col: anki.collection.Collection | None,
|
||||||
) -> dict[int, Any]:
|
) -> dict[int, Any]:
|
||||||
tr = _tr(col)
|
tr = _tr(col)
|
||||||
|
|
@ -124,3 +128,11 @@ def newCardSchedulingLabels(
|
||||||
1: tr.scheduling_show_new_cards_after_reviews(),
|
1: tr.scheduling_show_new_cards_after_reviews(),
|
||||||
2: tr.scheduling_show_new_cards_before_reviews(),
|
2: tr.scheduling_show_new_cards_before_reviews(),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
_deprecated_names = DeprecatedNamesMixinForModule(globals())
|
||||||
|
|
||||||
|
|
||||||
|
@no_type_check
|
||||||
|
def __getattr__(name: str) -> Any:
|
||||||
|
return _deprecated_names.__getattr__(name)
|
||||||
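With `consts.py` wired up like this, old camelCase imports keep resolving through the module-level `__getattr__`. A hedged example of what a caller sees (assumes an installed `anki` package; the path and line number in the warning depend on the caller):

```
import anki.consts as cs

# The new name resolves normally; the old one goes through the module
# __getattr__ above and prints something along the lines of:
#   /path/to/caller.py:12:'newCardOrderLabels' is deprecated: please use 'new_card_order_labels'
new_fn = cs.new_card_order_labels
old_fn = cs.newCardOrderLabels

assert new_fn is old_fn  # same function object, just reached via the shim
```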
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
"""
|
"""
|
||||||
A convenience wrapper over pysqlite.
|
A convenience wrapper over pysqlite.
|
||||||
|
|
||||||
|
|
@ -18,29 +20,31 @@ from sqlite3 import Cursor
|
||||||
from sqlite3 import dbapi2 as sqlite
|
from sqlite3 import dbapi2 as sqlite
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
from anki._legacy import DeprecatedNamesMixin
|
||||||
|
|
||||||
DBError = sqlite.Error
|
DBError = sqlite.Error
|
||||||
|
|
||||||
|
|
||||||
class DB:
|
class DB(DeprecatedNamesMixin):
|
||||||
def __init__(self, path: str, timeout: int = 0) -> None:
|
def __init__(self, path: str, timeout: int = 0) -> None:
|
||||||
self._db = sqlite.connect(path, timeout=timeout)
|
self._db = sqlite.connect(path, timeout=timeout)
|
||||||
self._db.text_factory = self._textFactory
|
self._db.text_factory = self._text_factory
|
||||||
self._path = path
|
self._path = path
|
||||||
self.echo = os.environ.get("DBECHO")
|
self.echo = os.environ.get("DBECHO")
|
||||||
self.mod = False
|
self.mod = False
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
d = dict(self.__dict__)
|
dict_ = dict(self.__dict__)
|
||||||
del d["_db"]
|
del dict_["_db"]
|
||||||
return f"{super().__repr__()} {pprint.pformat(d, width=300)}"
|
return f"{super().__repr__()} {pprint.pformat(dict_, width=300)}"
|
||||||
|
|
||||||
def execute(self, sql: str, *a: Any, **ka: Any) -> Cursor:
|
def execute(self, sql: str, *a: Any, **ka: Any) -> Cursor:
|
||||||
s = sql.strip().lower()
|
canonized = sql.strip().lower()
|
||||||
# mark modified?
|
# mark modified?
|
||||||
for stmt in "insert", "update", "delete":
|
for stmt in "insert", "update", "delete":
|
||||||
if s.startswith(stmt):
|
if canonized.startswith(stmt):
|
||||||
self.mod = True
|
self.mod = True
|
||||||
t = time.time()
|
start_time = time.time()
|
||||||
if ka:
|
if ka:
|
||||||
# execute("...where id = :id", id=5)
|
# execute("...where id = :id", id=5)
|
||||||
res = self._db.execute(sql, ka)
|
res = self._db.execute(sql, ka)
|
||||||
|
|
@ -49,25 +53,25 @@ class DB:
|
||||||
res = self._db.execute(sql, a)
|
res = self._db.execute(sql, a)
|
||||||
if self.echo:
|
if self.echo:
|
||||||
# print a, ka
|
# print a, ka
|
||||||
print(sql, f"{(time.time() - t) * 1000:0.3f}ms")
|
print(sql, f"{(time.time() - start_time) * 1000:0.3f}ms")
|
||||||
if self.echo == "2":
|
if self.echo == "2":
|
||||||
print(a, ka)
|
print(a, ka)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def executemany(self, sql: str, l: Any) -> None:
|
def executemany(self, sql: str, iterable: Any) -> None:
|
||||||
self.mod = True
|
self.mod = True
|
||||||
t = time.time()
|
start_time = time.time()
|
||||||
self._db.executemany(sql, l)
|
self._db.executemany(sql, iterable)
|
||||||
if self.echo:
|
if self.echo:
|
||||||
print(sql, f"{(time.time() - t) * 1000:0.3f}ms")
|
print(sql, f"{(time.time() - start_time) * 1000:0.3f}ms")
|
||||||
if self.echo == "2":
|
if self.echo == "2":
|
||||||
print(l)
|
print(iterable)
|
||||||
|
|
||||||
def commit(self) -> None:
|
def commit(self) -> None:
|
||||||
t = time.time()
|
start_time = time.time()
|
||||||
self._db.commit()
|
self._db.commit()
|
||||||
if self.echo:
|
if self.echo:
|
||||||
print(f"commit {(time.time() - t) * 1000:0.3f}ms")
|
print(f"commit {(time.time() - start_time) * 1000:0.3f}ms")
|
||||||
|
|
||||||
def executescript(self, sql: str) -> None:
|
def executescript(self, sql: str) -> None:
|
||||||
self.mod = True
|
self.mod = True
|
||||||
|
|
@ -88,9 +92,9 @@ class DB:
|
||||||
return self.execute(*a, **kw).fetchall()
|
return self.execute(*a, **kw).fetchall()
|
||||||
|
|
||||||
def first(self, *a: Any, **kw: Any) -> Any:
|
def first(self, *a: Any, **kw: Any) -> Any:
|
||||||
c = self.execute(*a, **kw)
|
cursor = self.execute(*a, **kw)
|
||||||
res = c.fetchone()
|
res = cursor.fetchone()
|
||||||
c.close()
|
cursor.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def list(self, *a: Any, **kw: Any) -> list:
|
def list(self, *a: Any, **kw: Any) -> list:
|
||||||
|
|
@ -110,20 +114,20 @@ class DB:
|
||||||
def __exit__(self, *args: Any) -> None:
|
def __exit__(self, *args: Any) -> None:
|
||||||
self._db.close()
|
self._db.close()
|
||||||
|
|
||||||
def totalChanges(self) -> Any:
|
def total_changes(self) -> Any:
|
||||||
return self._db.total_changes
|
return self._db.total_changes
|
||||||
|
|
||||||
def interrupt(self) -> None:
|
def interrupt(self) -> None:
|
||||||
self._db.interrupt()
|
self._db.interrupt()
|
||||||
|
|
||||||
def setAutocommit(self, autocommit: bool) -> None:
|
def set_autocommit(self, autocommit: bool) -> None:
|
||||||
if autocommit:
|
if autocommit:
|
||||||
self._db.isolation_level = None
|
self._db.isolation_level = None
|
||||||
else:
|
else:
|
||||||
self._db.isolation_level = ""
|
self._db.isolation_level = ""
|
||||||
|
|
||||||
# strip out invalid utf-8 when reading from db
|
# strip out invalid utf-8 when reading from db
|
||||||
def _textFactory(self, data: bytes) -> str:
|
def _text_factory(self, data: bytes) -> str:
|
||||||
return str(data, errors="ignore")
|
return str(data, errors="ignore")
|
||||||
|
|
||||||
def cursor(self, factory: type[Cursor] = Cursor) -> Cursor:
|
def cursor(self, factory: type[Cursor] = Cursor) -> Cursor:
|
||||||
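Because `DB` now inherits `DeprecatedNamesMixin`, the renamed helpers stay reachable under their old camelCase spellings via the snake_case fallback. A small sketch, assuming an installed `anki` package (an in-memory database keeps it self-contained):

```
from anki.db import DB

db = DB(":memory:")
db.execute("create table t (x)")
db.executemany("insert into t values (?)", [(1,), (2,)])

assert db.total_changes() == 2   # new snake_case name
# The old name still works: the mixin remaps it to total_changes() and prints a
# "'totalChanges' is deprecated: please use 'total_changes'" style message.
assert db.totalChanges() == 2
```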
|
|
|
||||||
|
|
@ -366,7 +366,7 @@ class DeckManager(DeprecatedNamesMixin):
|
||||||
self.update_config(new)
|
self.update_config(new)
|
||||||
# if it was previously randomized, re-sort
|
# if it was previously randomized, re-sort
|
||||||
if not old_order:
|
if not old_order:
|
||||||
self.col.sched.resortConf(new)
|
self.col.sched.resort_conf(new)
|
||||||
|
|
||||||
# Deck utils
|
# Deck utils
|
||||||
#############################################################
|
#############################################################
|
||||||
|
|
|
||||||
|
|
@ -246,7 +246,7 @@ class AnkiExporter(Exporter):
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
# need to reset card state
|
# need to reset card state
|
||||||
self.dst.sched.resetCards(cids)
|
self.dst.sched.reset_cards(cids)
|
||||||
# models - start with zero
|
# models - start with zero
|
||||||
self.dst.mod_schema(check=False)
|
self.dst.mod_schema(check=False)
|
||||||
self.dst.models.remove_all_notetypes()
|
self.dst.models.remove_all_notetypes()
|
||||||
|
|
|
||||||
|
|
@ -467,7 +467,7 @@ insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)""",
|
||||||
|
|
||||||
def _postImport(self) -> None:
|
def _postImport(self) -> None:
|
||||||
for did in list(self._decks.values()):
|
for did in list(self._decks.values()):
|
||||||
self.col.sched.maybeRandomizeDeck(did)
|
self.col.sched.maybe_randomize_deck(did)
|
||||||
# make sure new position is correct
|
# make sure new position is correct
|
||||||
self.dst.conf["nextPos"] = (
|
self.dst.conf["nextPos"] = (
|
||||||
self.dst.db.scalar("select max(due)+1 from cards where type = 0") or 0
|
self.dst.db.scalar("select max(due)+1 from cards where type = 0") or 0
|
||||||
|
|
|
||||||
|
|
@ -217,7 +217,7 @@ class NoteImporter(Importer):
|
||||||
conf = self.col.decks.config_dict_for_deck_id(did)
|
conf = self.col.decks.config_dict_for_deck_id(did)
|
||||||
# in order due?
|
# in order due?
|
||||||
if not conf["dyn"] and conf["new"]["order"] == NEW_CARDS_RANDOM:
|
if not conf["dyn"] and conf["new"]["order"] == NEW_CARDS_RANDOM:
|
||||||
self.col.sched.randomizeCards(did)
|
self.col.sched.randomize_cards(did)
|
||||||
|
|
||||||
part1 = self.col.tr.importing_note_added(count=len(new))
|
part1 = self.col.tr.importing_note_added(count=len(new))
|
||||||
part2 = self.col.tr.importing_note_updated(count=self.updateCount)
|
part2 = self.col.tr.importing_note_updated(count=self.updateCount)
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,13 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import anki
|
import anki
|
||||||
from anki import decks_pb2, scheduler_pb2
|
from anki import decks_pb2, scheduler_pb2
|
||||||
|
from anki._legacy import DeprecatedNamesMixin
|
||||||
from anki.collection import OpChanges, OpChangesWithCount, OpChangesWithId
|
from anki.collection import OpChanges, OpChangesWithCount, OpChangesWithId
|
||||||
from anki.config import Config
|
from anki.config import Config
|
||||||
|
|
||||||
|
|
@ -25,7 +28,7 @@ from anki.notes import NoteId
|
||||||
from anki.utils import ids2str, intTime
|
from anki.utils import ids2str, intTime
|
||||||
|
|
||||||
|
|
||||||
class SchedulerBase:
|
class SchedulerBase(DeprecatedNamesMixin):
|
||||||
"Actions shared between schedulers."
|
"Actions shared between schedulers."
|
||||||
version = 0
|
version = 0
|
||||||
|
|
||||||
|
|
@ -40,7 +43,7 @@ class SchedulerBase:
|
||||||
return self._timing_today().days_elapsed
|
return self._timing_today().days_elapsed
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def dayCutoff(self) -> int:
|
def day_cutoff(self) -> int:
|
||||||
return self._timing_today().next_day_at
|
return self._timing_today().next_day_at
|
||||||
|
|
||||||
# Deck list
|
# Deck list
|
||||||
|
|
@ -57,34 +60,34 @@ class SchedulerBase:
|
||||||
def congratulations_info(self) -> CongratsInfo:
|
def congratulations_info(self) -> CongratsInfo:
|
||||||
return self.col._backend.congrats_info()
|
return self.col._backend.congrats_info()
|
||||||
|
|
||||||
def haveBuriedSiblings(self) -> bool:
|
def have_buried_siblings(self) -> bool:
|
||||||
return self.congratulations_info().have_sched_buried
|
return self.congratulations_info().have_sched_buried
|
||||||
|
|
||||||
def haveManuallyBuried(self) -> bool:
|
def have_manually_buried(self) -> bool:
|
||||||
return self.congratulations_info().have_user_buried
|
return self.congratulations_info().have_user_buried
|
||||||
|
|
||||||
def haveBuried(self) -> bool:
|
def have_buried(self) -> bool:
|
||||||
info = self.congratulations_info()
|
info = self.congratulations_info()
|
||||||
return info.have_sched_buried or info.have_user_buried
|
return info.have_sched_buried or info.have_user_buried
|
||||||
|
|
||||||
def extendLimits(self, new: int, rev: int) -> None:
|
def extend_limits(self, new: int, rev: int) -> None:
|
||||||
did = self.col.decks.current()["id"]
|
did = self.col.decks.current()["id"]
|
||||||
self.col._backend.extend_limits(deck_id=did, new_delta=new, review_delta=rev)
|
self.col._backend.extend_limits(deck_id=did, new_delta=new, review_delta=rev)
|
||||||
|
|
||||||
# fixme: used by custom study
|
# fixme: used by custom study
|
||||||
def totalRevForCurrentDeck(self) -> int:
|
def total_rev_for_current_deck(self) -> int:
|
||||||
assert self.col.db
|
assert self.col.db
|
||||||
return self.col.db.scalar(
|
return self.col.db.scalar(
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where id in (
|
select count() from cards where id in (
|
||||||
select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? limit 9999)"""
|
select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? limit 9999)"""
|
||||||
% self._deckLimit(),
|
% self._deck_limit(),
|
||||||
self.today,
|
self.today,
|
||||||
)
|
)
|
||||||
|
|
||||||
# fixme: only used by totalRevForCurrentDeck and old deck stats;
|
# fixme: only used by total_rev_for_current_deck and old deck stats;
|
||||||
# schedv2 defines separate version
|
# schedv2 defines separate version
|
||||||
def _deckLimit(self) -> str:
|
def _deck_limit(self) -> str:
|
||||||
return ids2str(
|
return ids2str(
|
||||||
self.col.decks.deck_and_child_ids(self.col.decks.get_current_id())
|
self.col.decks.deck_and_child_ids(self.col.decks.get_current_id())
|
||||||
)
|
)
|
||||||
|
|
@ -179,12 +182,12 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
|
||||||
config_key=key, # type: ignore
|
config_key=key, # type: ignore
|
||||||
)
|
)
|
||||||
|
|
||||||
def resetCards(self, ids: list[CardId]) -> None:
|
def reset_cards(self, ids: list[CardId]) -> None:
|
||||||
"Completely reset cards for export."
|
"Completely reset cards for export."
|
||||||
sids = ids2str(ids)
|
sids = ids2str(ids)
|
||||||
assert self.col.db
|
assert self.col.db
|
||||||
# we want to avoid resetting due number of existing new cards on export
|
# we want to avoid resetting due number of existing new cards on export
|
||||||
nonNew = self.col.db.list(
|
non_new = self.col.db.list(
|
||||||
f"select id from cards where id in %s and (queue != {QUEUE_TYPE_NEW} or type != {CARD_TYPE_NEW})"
|
f"select id from cards where id in %s and (queue != {QUEUE_TYPE_NEW} or type != {CARD_TYPE_NEW})"
|
||||||
% sids
|
% sids
|
||||||
)
|
)
|
||||||
|
|
@ -194,7 +197,7 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
|
||||||
" where id in %s" % sids
|
" where id in %s" % sids
|
||||||
)
|
)
|
||||||
# and forget any non-new cards, changing their due numbers
|
# and forget any non-new cards, changing their due numbers
|
||||||
self.col._backend.schedule_cards_as_new(card_ids=nonNew, log=False)
|
self.col._backend.schedule_cards_as_new(card_ids=non_new, log=False)
|
||||||
|
|
||||||
# Repositioning new cards
|
# Repositioning new cards
|
||||||
##########################################################################
|
##########################################################################
|
||||||
|
|
@ -215,30 +218,29 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
|
||||||
shift_existing=shift_existing,
|
shift_existing=shift_existing,
|
||||||
)
|
)
|
||||||
|
|
||||||
def randomizeCards(self, did: DeckId) -> None:
|
def randomize_cards(self, did: DeckId) -> None:
|
||||||
self.col._backend.sort_deck(deck_id=did, randomize=True)
|
self.col._backend.sort_deck(deck_id=did, randomize=True)
|
||||||
|
|
||||||
def orderCards(self, did: DeckId) -> None:
|
def order_cards(self, did: DeckId) -> None:
|
||||||
self.col._backend.sort_deck(deck_id=did, randomize=False)
|
self.col._backend.sort_deck(deck_id=did, randomize=False)
|
||||||
|
|
||||||
def resortConf(self, conf: DeckConfigDict) -> None:
|
def resort_conf(self, conf: DeckConfigDict) -> None:
|
||||||
for did in self.col.decks.decks_using_config(conf):
|
for did in self.col.decks.decks_using_config(conf):
|
||||||
if conf["new"]["order"] == 0:
|
if conf["new"]["order"] == 0:
|
||||||
self.randomizeCards(did)
|
self.randomize_cards(did)
|
||||||
else:
|
else:
|
||||||
self.orderCards(did)
|
self.order_cards(did)
|
||||||
|
|
||||||
# for post-import
|
# for post-import
|
||||||
def maybeRandomizeDeck(self, did: DeckId | None = None) -> None:
|
def maybe_randomize_deck(self, did: DeckId | None = None) -> None:
|
||||||
if not did:
|
if not did:
|
||||||
did = self.col.decks.selected()
|
did = self.col.decks.selected()
|
||||||
conf = self.col.decks.config_dict_for_deck_id(did)
|
conf = self.col.decks.config_dict_for_deck_id(did)
|
||||||
# in order due?
|
# in order due?
|
||||||
if conf["new"]["order"] == NEW_CARDS_RANDOM:
|
if conf["new"]["order"] == NEW_CARDS_RANDOM:
|
||||||
self.randomizeCards(did)
|
self.randomize_cards(did)
|
||||||
|
|
||||||
# legacy
|
def _legacy_sort_cards(
|
||||||
def sortCards(
|
|
||||||
self,
|
self,
|
||||||
cids: list[CardId],
|
cids: list[CardId],
|
||||||
start: int = 1,
|
start: int = 1,
|
||||||
|
|
@ -247,3 +249,8 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
|
||||||
shift: bool = False,
|
shift: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.reposition_new_cards(cids, start, step, shuffle, shift)
|
self.reposition_new_cards(cids, start, step, shuffle, shift)
|
||||||
|
|
||||||
|
|
||||||
|
SchedulerBase.register_deprecated_attributes(
|
||||||
|
sortCards=(SchedulerBase._legacy_sort_cards, SchedulerBase.reposition_new_cards)
|
||||||
|
)
|
||||||
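The `register_deprecated_attributes` call above keeps `sortCards` callable while advertising `reposition_new_cards`. A rough sketch of the same pattern on a made-up class, assuming the registration stores each entry as (target method name, recommended name) — which is what the SchedulerBase call and the mixin's lookup suggest, though the exact storage isn't shown in this diff:

```
from anki._legacy import DeprecatedNamesMixin


class Toolbox(DeprecatedNamesMixin):
    """Made-up example class; not part of the commit."""

    def reposition_items(self, items):
        return sorted(items)

    def _legacy_sort_items(self, items):
        # thin shim kept around for old callers
        return self.reposition_items(items)


# old name -> (what actually gets called, what to recommend instead)
Toolbox.register_deprecated_attributes(
    sortItems=(Toolbox._legacy_sort_items, Toolbox.reposition_items)
)

box = Toolbox()
# Warns "'sortItems' is deprecated: please use 'reposition_items'", then delegates.
assert box.sortItems([3, 1, 2]) == [1, 2, 3]
```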
|
|
|
||||||
|
|
@ -162,8 +162,8 @@ class Scheduler(V2):
|
||||||
f"""
|
f"""
|
||||||
select sum(left/1000) from (select left from cards where
|
select sum(left/1000) from (select left from cards where
|
||||||
did in %s and queue = {QUEUE_TYPE_LRN} and due < ? limit %d)"""
|
did in %s and queue = {QUEUE_TYPE_LRN} and due < ? limit %d)"""
|
||||||
% (self._deckLimit(), self.reportLimit),
|
% (self._deck_limit(), self.reportLimit),
|
||||||
self.dayCutoff,
|
self.day_cutoff,
|
||||||
)
|
)
|
||||||
or 0
|
or 0
|
||||||
)
|
)
|
||||||
|
|
@ -172,7 +172,7 @@ did in %s and queue = {QUEUE_TYPE_LRN} and due < ? limit %d)"""
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
|
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
|
||||||
and due <= ? limit %d"""
|
and due <= ? limit %d"""
|
||||||
% (self._deckLimit(), self.reportLimit),
|
% (self._deck_limit(), self.reportLimit),
|
||||||
self.today,
|
self.today,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -193,8 +193,8 @@ and due <= ? limit %d"""
|
||||||
select due, id from cards where
|
select due, id from cards where
|
||||||
did in %s and queue = {QUEUE_TYPE_LRN} and due < ?
|
did in %s and queue = {QUEUE_TYPE_LRN} and due < ?
|
||||||
limit %d"""
|
limit %d"""
|
||||||
% (self._deckLimit(), self.reportLimit),
|
% (self._deck_limit(), self.reportLimit),
|
||||||
self.dayCutoff,
|
self.day_cutoff,
|
||||||
)
|
)
|
||||||
self._lrnQueue = [tuple(e) for e in self._lrnQueue]
|
self._lrnQueue = [tuple(e) for e in self._lrnQueue]
|
||||||
# as it arrives sorted by did first, we need to sort it
|
# as it arrives sorted by did first, we need to sort it
|
||||||
|
|
@ -257,7 +257,7 @@ limit %d"""
|
||||||
delay *= int(random.uniform(1, 1.25))
|
delay *= int(random.uniform(1, 1.25))
|
||||||
card.due = int(time.time() + delay)
|
card.due = int(time.time() + delay)
|
||||||
# due today?
|
# due today?
|
||||||
if card.due < self.dayCutoff:
|
if card.due < self.day_cutoff:
|
||||||
self.lrnCount += card.left // 1000
|
self.lrnCount += card.left // 1000
|
||||||
# if the queue is not empty and there's nothing else to do, make
|
# if the queue is not empty and there's nothing else to do, make
|
||||||
# sure we don't put it at the head of the queue and end up showing
|
# sure we don't put it at the head of the queue and end up showing
|
||||||
|
|
@ -270,7 +270,7 @@ limit %d"""
|
||||||
else:
|
else:
|
||||||
# the card is due in one or more days, so we need to use the
|
# the card is due in one or more days, so we need to use the
|
||||||
# day learn queue
|
# day learn queue
|
||||||
ahead = ((card.due - self.dayCutoff) // 86400) + 1
|
ahead = ((card.due - self.day_cutoff) // 86400) + 1
|
||||||
card.due = self.today + ahead
|
card.due = self.today + ahead
|
||||||
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
||||||
self._logLrn(card, ease, conf, leaving, type, lastLeft)
|
self._logLrn(card, ease, conf, leaving, type, lastLeft)
|
||||||
|
|
@ -513,13 +513,13 @@ did = ? and queue = {QUEUE_TYPE_REV} and due <= ? limit ?""",
|
||||||
card.due = int(delay + time.time())
|
card.due = int(delay + time.time())
|
||||||
card.left = self._startingLeft(card)
|
card.left = self._startingLeft(card)
|
||||||
# queue 1
|
# queue 1
|
||||||
if card.due < self.dayCutoff:
|
if card.due < self.day_cutoff:
|
||||||
self.lrnCount += card.left // 1000
|
self.lrnCount += card.left // 1000
|
||||||
card.queue = QUEUE_TYPE_LRN
|
card.queue = QUEUE_TYPE_LRN
|
||||||
heappush(self._lrnQueue, (card.due, card.id))
|
heappush(self._lrnQueue, (card.due, card.id))
|
||||||
else:
|
else:
|
||||||
# day learn queue
|
# day learn queue
|
||||||
ahead = ((card.due - self.dayCutoff) // 86400) + 1
|
ahead = ((card.due - self.day_cutoff) // 86400) + 1
|
||||||
card.due = self.today + ahead
|
card.due = self.today + ahead
|
||||||
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
||||||
return delay
|
return delay
|
||||||
|
|
@ -674,8 +674,8 @@ did = ? and queue = {QUEUE_TYPE_REV} and due <= ? limit ?""",
|
||||||
# Deck finished state
|
# Deck finished state
|
||||||
##########################################################################
|
##########################################################################
|
||||||
|
|
||||||
def haveBuried(self) -> bool:
|
def have_buried(self) -> bool:
|
||||||
sdids = self._deckLimit()
|
sdids = self._deck_limit()
|
||||||
cnt = self.col.db.scalar(
|
cnt = self.col.db.scalar(
|
||||||
f"select 1 from cards where queue = {QUEUE_TYPE_SIBLING_BURIED} and did in %s limit 1"
|
f"select 1 from cards where queue = {QUEUE_TYPE_SIBLING_BURIED} and did in %s limit 1"
|
||||||
% sdids
|
% sdids
|
||||||
|
|
|
||||||
|
|
@ -68,7 +68,7 @@ class Scheduler(SchedulerBaseWithLegacy):
|
||||||
|
|
||||||
def _checkDay(self) -> None:
|
def _checkDay(self) -> None:
|
||||||
# check if the day has rolled over
|
# check if the day has rolled over
|
||||||
if time.time() > self.dayCutoff:
|
if time.time() > self.day_cutoff:
|
||||||
self.reset()
|
self.reset()
|
||||||
|
|
||||||
# Fetching the next card
|
# Fetching the next card
|
||||||
|
|
@ -260,7 +260,7 @@ select count() from
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where id in (
|
select count() from cards where id in (
|
||||||
select id from cards where did in %s and queue = {QUEUE_TYPE_NEW} limit ?)"""
|
select id from cards where did in %s and queue = {QUEUE_TYPE_NEW} limit ?)"""
|
||||||
% self._deckLimit(),
|
% self._deck_limit(),
|
||||||
self.reportLimit,
|
self.reportLimit,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -286,7 +286,7 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_NEW} limit ?)"""
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where did in %s and queue = {QUEUE_TYPE_LRN}
|
select count() from cards where did in %s and queue = {QUEUE_TYPE_LRN}
|
||||||
and due < ?"""
|
and due < ?"""
|
||||||
% (self._deckLimit()),
|
% (self._deck_limit()),
|
||||||
self._lrnCutoff,
|
self._lrnCutoff,
|
||||||
)
|
)
|
||||||
or 0
|
or 0
|
||||||
|
|
@ -296,7 +296,7 @@ and due < ?"""
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
|
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
|
||||||
and due <= ?"""
|
and due <= ?"""
|
||||||
% (self._deckLimit()),
|
% (self._deck_limit()),
|
||||||
self.today,
|
self.today,
|
||||||
)
|
)
|
||||||
# previews
|
# previews
|
||||||
|
|
@ -304,7 +304,7 @@ and due <= ?"""
|
||||||
f"""
|
f"""
|
||||||
select count() from cards where did in %s and queue = {QUEUE_TYPE_PREVIEW}
|
select count() from cards where did in %s and queue = {QUEUE_TYPE_PREVIEW}
|
||||||
"""
|
"""
|
||||||
% (self._deckLimit())
|
% (self._deck_limit())
|
||||||
)
|
)
|
||||||
|
|
||||||
def _resetLrn(self) -> None:
|
def _resetLrn(self) -> None:
|
||||||
|
|
@ -326,7 +326,7 @@ select count() from cards where did in %s and queue = {QUEUE_TYPE_PREVIEW}
|
||||||
select due, id from cards where
|
select due, id from cards where
|
||||||
did in %s and queue in ({QUEUE_TYPE_LRN},{QUEUE_TYPE_PREVIEW}) and due < ?
|
did in %s and queue in ({QUEUE_TYPE_LRN},{QUEUE_TYPE_PREVIEW}) and due < ?
|
||||||
limit %d"""
|
limit %d"""
|
||||||
% (self._deckLimit(), self.reportLimit),
|
% (self._deck_limit(), self.reportLimit),
|
||||||
cutoff,
|
cutoff,
|
||||||
)
|
)
|
||||||
self._lrnQueue = [cast(tuple[int, CardId], tuple(e)) for e in self._lrnQueue]
|
self._lrnQueue = [cast(tuple[int, CardId], tuple(e)) for e in self._lrnQueue]
|
||||||
|
|
@ -422,7 +422,7 @@ select id from cards where
|
||||||
did in %s and queue = {QUEUE_TYPE_REV} and due <= ?
|
did in %s and queue = {QUEUE_TYPE_REV} and due <= ?
|
||||||
order by due, random()
|
order by due, random()
|
||||||
limit ?"""
|
limit ?"""
|
||||||
% self._deckLimit(),
|
% self._deck_limit(),
|
||||||
self.today,
|
self.today,
|
||||||
lim,
|
lim,
|
||||||
)
|
)
|
||||||
|
|
@ -503,7 +503,7 @@ limit ?"""
|
||||||
def _cardConf(self, card: Card) -> DeckConfigDict:
|
def _cardConf(self, card: Card) -> DeckConfigDict:
|
||||||
return self.col.decks.config_dict_for_deck_id(card.did)
|
return self.col.decks.config_dict_for_deck_id(card.did)
|
||||||
|
|
||||||
def _deckLimit(self) -> str:
|
def _deck_limit(self) -> str:
|
||||||
return ids2str(self.col.decks.active())
|
return ids2str(self.col.decks.active())
|
||||||
|
|
||||||
# Answering (re)learning cards
|
# Answering (re)learning cards
|
||||||
|
|
@ -609,11 +609,11 @@ limit ?"""
|
||||||
|
|
||||||
card.due = int(time.time() + delay)
|
card.due = int(time.time() + delay)
|
||||||
# due today?
|
# due today?
|
||||||
if card.due < self.dayCutoff:
|
if card.due < self.day_cutoff:
|
||||||
# add some randomness, up to 5 minutes or 25%
|
# add some randomness, up to 5 minutes or 25%
|
||||||
maxExtra = min(300, int(delay * 0.25))
|
maxExtra = min(300, int(delay * 0.25))
|
||||||
fuzz = random.randrange(0, max(1, maxExtra))
|
fuzz = random.randrange(0, max(1, maxExtra))
|
||||||
card.due = min(self.dayCutoff - 1, card.due + fuzz)
|
card.due = min(self.day_cutoff - 1, card.due + fuzz)
|
||||||
card.queue = QUEUE_TYPE_LRN
|
card.queue = QUEUE_TYPE_LRN
|
||||||
if card.due < (intTime() + self.col.conf["collapseTime"]):
|
if card.due < (intTime() + self.col.conf["collapseTime"]):
|
||||||
self.lrnCount += 1
|
self.lrnCount += 1
|
||||||
|
|
@ -627,7 +627,7 @@ limit ?"""
|
||||||
else:
|
else:
|
||||||
# the card is due in one or more days, so we need to use the
|
# the card is due in one or more days, so we need to use the
|
||||||
# day learn queue
|
# day learn queue
|
||||||
ahead = ((card.due - self.dayCutoff) // 86400) + 1
|
ahead = ((card.due - self.day_cutoff) // 86400) + 1
|
||||||
card.due = self.today + ahead
|
card.due = self.today + ahead
|
||||||
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
|
||||||
return delay
|
return delay
|
||||||
|
|
@ -701,7 +701,7 @@ limit ?"""
|
||||||
ok = 0
|
ok = 0
|
||||||
for idx, delay in enumerate(delays):
|
for idx, delay in enumerate(delays):
|
||||||
now += int(delay * 60)
|
now += int(delay * 60)
|
||||||
if now > self.dayCutoff:
|
if now > self.day_cutoff:
|
||||||
break
|
break
|
||||||
ok = idx
|
ok = idx
|
||||||
return ok + 1
|
return ok + 1
|
||||||
|
|
|
||||||
|
|
@ -155,7 +155,7 @@ sum(case when type = {REVLOG_RELRN} then 1 else 0 end), /* relearn */
|
||||||
sum(case when type = {REVLOG_CRAM} then 1 else 0 end) /* filter */
|
sum(case when type = {REVLOG_CRAM} then 1 else 0 end) /* filter */
|
||||||
from revlog where id > ? """
|
from revlog where id > ? """
|
||||||
+ lim,
|
+ lim,
|
||||||
(self.col.sched.dayCutoff - 86400) * 1000,
|
(self.col.sched.day_cutoff - 86400) * 1000,
|
||||||
)
|
)
|
||||||
cards = cards or 0
|
cards = cards or 0
|
||||||
thetime = thetime or 0
|
thetime = thetime or 0
|
||||||
|
|
@ -189,7 +189,7 @@ from revlog where id > ? """
|
||||||
select count(), sum(case when ease = 1 then 0 else 1 end) from revlog
|
select count(), sum(case when ease = 1 then 0 else 1 end) from revlog
|
||||||
where lastIvl >= 21 and id > ?"""
|
where lastIvl >= 21 and id > ?"""
|
||||||
+ lim,
|
+ lim,
|
||||||
(self.col.sched.dayCutoff - 86400) * 1000,
|
(self.col.sched.day_cutoff - 86400) * 1000,
|
||||||
)
|
)
|
||||||
b += "<br>"
|
b += "<br>"
|
||||||
if mcnt:
|
if mcnt:
|
||||||
|
|
@ -497,7 +497,7 @@ group by day order by day"""
|
||||||
lims = []
|
lims = []
|
||||||
if num is not None:
|
if num is not None:
|
||||||
lims.append(
|
lims.append(
|
||||||
"id > %d" % ((self.col.sched.dayCutoff - (num * chunk * 86400)) * 1000)
|
"id > %d" % ((self.col.sched.day_cutoff - (num * chunk * 86400)) * 1000)
|
||||||
)
|
)
|
||||||
lims.append("did in %s" % self._limit())
|
lims.append("did in %s" % self._limit())
|
||||||
if lims:
|
if lims:
|
||||||
|
|
@ -516,7 +516,7 @@ count(id)
|
||||||
from cards %s
|
from cards %s
|
||||||
group by day order by day"""
|
group by day order by day"""
|
||||||
% lim,
|
% lim,
|
||||||
self.col.sched.dayCutoff,
|
self.col.sched.day_cutoff,
|
||||||
chunk,
|
chunk,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -524,7 +524,7 @@ group by day order by day"""
|
||||||
lims = []
|
lims = []
|
||||||
if num is not None:
|
if num is not None:
|
||||||
lims.append(
|
lims.append(
|
||||||
"id > %d" % ((self.col.sched.dayCutoff - (num * chunk * 86400)) * 1000)
|
"id > %d" % ((self.col.sched.day_cutoff - (num * chunk * 86400)) * 1000)
|
||||||
)
|
)
|
||||||
lim = self._revlogLimit()
|
lim = self._revlogLimit()
|
||||||
if lim:
|
if lim:
|
||||||
|
|
@ -555,7 +555,7 @@ sum(case when type = {REVLOG_CRAM} then time/1000.0 else 0 end)/? -- cram time
|
||||||
from revlog %s
|
from revlog %s
|
||||||
group by day order by day"""
|
group by day order by day"""
|
||||||
% lim,
|
% lim,
|
||||||
self.col.sched.dayCutoff,
|
self.col.sched.day_cutoff,
|
||||||
chunk,
|
chunk,
|
||||||
tf,
|
tf,
|
||||||
tf,
|
tf,
|
||||||
|
|
@ -568,7 +568,9 @@ group by day order by day"""
|
||||||
lims = []
|
lims = []
|
||||||
num = self._periodDays()
|
num = self._periodDays()
|
||||||
if num:
|
if num:
|
||||||
lims.append("id > %d" % ((self.col.sched.dayCutoff - (num * 86400)) * 1000))
|
lims.append(
|
||||||
|
"id > %d" % ((self.col.sched.day_cutoff - (num * 86400)) * 1000)
|
||||||
|
)
|
||||||
rlim = self._revlogLimit()
|
rlim = self._revlogLimit()
|
||||||
if rlim:
|
if rlim:
|
||||||
lims.append(rlim)
|
lims.append(rlim)
|
||||||
|
|
@ -583,7 +585,7 @@ select count(), abs(min(day)) from (select
|
||||||
from revlog %s
|
from revlog %s
|
||||||
group by day order by day)"""
|
group by day order by day)"""
|
||||||
% lim,
|
% lim,
|
||||||
self.col.sched.dayCutoff,
|
self.col.sched.day_cutoff,
|
||||||
)
|
)
|
||||||
assert ret
|
assert ret
|
||||||
return ret
|
return ret
|
||||||
|
|
@ -742,7 +744,7 @@ select count(), avg(ivl), max(ivl) from cards where did in %s and queue = {QUEUE
|
||||||
days = self._periodDays()
|
days = self._periodDays()
|
||||||
if days is not None:
|
if days is not None:
|
||||||
lims.append(
|
lims.append(
|
||||||
"id > %d" % ((self.col.sched.dayCutoff - (days * 86400)) * 1000)
|
"id > %d" % ((self.col.sched.day_cutoff - (days * 86400)) * 1000)
|
||||||
)
|
)
|
||||||
if lims:
|
if lims:
|
||||||
lim = "where " + " and ".join(lims)
|
lim = "where " + " and ".join(lims)
|
||||||
|
|
@ -845,7 +847,7 @@ order by thetype, ease"""
|
||||||
rolloverHour = self.col.conf.get("rollover", 4)
|
rolloverHour = self.col.conf.get("rollover", 4)
|
||||||
pd = self._periodDays()
|
pd = self._periodDays()
|
||||||
if pd:
|
if pd:
|
||||||
lim += " and id > %d" % ((self.col.sched.dayCutoff - (86400 * pd)) * 1000)
|
lim += " and id > %d" % ((self.col.sched.day_cutoff - (86400 * pd)) * 1000)
|
||||||
return self.col.db.all(
|
return self.col.db.all(
|
||||||
f"""
|
f"""
|
||||||
select
|
select
|
||||||
|
|
@ -856,7 +858,7 @@ count()
|
||||||
from revlog where type in ({REVLOG_LRN},{REVLOG_REV},{REVLOG_RELRN}) %s
|
from revlog where type in ({REVLOG_LRN},{REVLOG_REV},{REVLOG_RELRN}) %s
|
||||||
group by hour having count() > 30 order by hour"""
|
group by hour having count() > 30 order by hour"""
|
||||||
% lim,
|
% lim,
|
||||||
self.col.sched.dayCutoff - (rolloverHour * 3600),
|
self.col.sched.day_cutoff - (rolloverHour * 3600),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Cards
|
# Cards
|
||||||
|
|
@ -1079,7 +1081,7 @@ $(function () {
|
||||||
def _limit(self) -> Any:
|
def _limit(self) -> Any:
|
||||||
if self.wholeCollection:
|
if self.wholeCollection:
|
||||||
return ids2str([d["id"] for d in self.col.decks.all()])
|
return ids2str([d["id"] for d in self.col.decks.all()])
|
||||||
return self.col.sched._deckLimit()
|
return self.col.sched._deck_limit()
|
||||||
|
|
||||||
def _revlogLimit(self) -> str:
|
def _revlogLimit(self) -> str:
|
||||||
if self.wholeCollection:
|
if self.wholeCollection:
|
||||||
|
|
@ -1106,7 +1108,7 @@ $(function () {
|
||||||
if not t:
|
if not t:
|
||||||
period = 1
|
period = 1
|
||||||
else:
|
else:
|
||||||
period = max(1, int(1 + ((self.col.sched.dayCutoff - (t / 1000)) / 86400)))
|
period = max(1, int(1 + ((self.col.sched.day_cutoff - (t / 1000)) / 86400)))
|
||||||
return period
|
return period
|
||||||
|
|
||||||
def _periodDays(self) -> int | None:
|
def _periodDays(self) -> int | None:
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,8 @@
|
||||||
# Please see /docs/syncserver.md
|
# Please see /docs/syncserver.md
|
||||||
#
|
#
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import gzip
|
import gzip
|
||||||
|
|
@ -19,8 +21,8 @@ from typing import Iterable, Optional
|
||||||
try:
|
try:
|
||||||
import flask
|
import flask
|
||||||
from waitress.server import create_server
|
from waitress.server import create_server
|
||||||
except ImportError as e:
|
except ImportError as error:
|
||||||
print(e, "- to use the server, 'pip install anki[syncserver]'")
|
print(error, "- to use the server, 'pip install anki[syncserver]'")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -72,10 +74,10 @@ def handle_sync_request(method_str: str) -> Response:
|
||||||
col.close_for_full_sync()
|
col.close_for_full_sync()
|
||||||
try:
|
try:
|
||||||
outdata = col._backend.sync_server_method(method=method, data=data)
|
outdata = col._backend.sync_server_method(method=method, data=data)
|
||||||
except Exception as e:
|
except Exception as error:
|
||||||
if method == Method.META:
|
if method == Method.META:
|
||||||
# if parallel syncing requests come in, block them
|
# if parallel syncing requests come in, block them
|
||||||
print("exception in meta", e)
|
print("exception in meta", error)
|
||||||
return flask.make_response("Conflict", 409)
|
return flask.make_response("Conflict", 409)
|
||||||
else:
|
else:
|
||||||
raise
|
raise
|
||||||
|
|
@ -91,8 +93,8 @@ def handle_sync_request(method_str: str) -> Response:
|
||||||
path = outdata.decode("utf8")
|
path = outdata.decode("utf8")
|
||||||
|
|
||||||
def stream_reply() -> Iterable[bytes]:
|
def stream_reply() -> Iterable[bytes]:
|
||||||
with open(path, "rb") as f:
|
with open(path, "rb") as file:
|
||||||
while chunk := f.read(16 * 1024):
|
while chunk := file.read(16 * 1024):
|
||||||
yield chunk
|
yield chunk
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
@ -117,30 +119,29 @@ def after_full_sync() -> None:
|
||||||
def get_method(
|
def get_method(
|
||||||
method_str: str,
|
method_str: str,
|
||||||
) -> SyncServerMethodRequest.Method.V | None: # pylint: disable=no-member
|
) -> SyncServerMethodRequest.Method.V | None: # pylint: disable=no-member
|
||||||
s = method_str
|
if method_str == "hostKey":
|
||||||
if s == "hostKey":
|
|
||||||
return Method.HOST_KEY
|
return Method.HOST_KEY
|
||||||
elif s == "meta":
|
elif method_str == "meta":
|
||||||
return Method.META
|
return Method.META
|
||||||
elif s == "start":
|
elif method_str == "start":
|
||||||
return Method.START
|
return Method.START
|
||||||
elif s == "applyGraves":
|
elif method_str == "applyGraves":
|
||||||
return Method.APPLY_GRAVES
|
return Method.APPLY_GRAVES
|
||||||
elif s == "applyChanges":
|
elif method_str == "applyChanges":
|
||||||
return Method.APPLY_CHANGES
|
return Method.APPLY_CHANGES
|
||||||
elif s == "chunk":
|
elif method_str == "chunk":
|
||||||
return Method.CHUNK
|
return Method.CHUNK
|
||||||
elif s == "applyChunk":
|
elif method_str == "applyChunk":
|
||||||
return Method.APPLY_CHUNK
|
return Method.APPLY_CHUNK
|
||||||
elif s == "sanityCheck2":
|
elif method_str == "sanityCheck2":
|
||||||
return Method.SANITY_CHECK
|
return Method.SANITY_CHECK
|
||||||
elif s == "finish":
|
elif method_str == "finish":
|
||||||
return Method.FINISH
|
return Method.FINISH
|
||||||
elif s == "abort":
|
elif method_str == "abort":
|
||||||
return Method.ABORT
|
return Method.ABORT
|
||||||
elif s == "upload":
|
elif method_str == "upload":
|
||||||
return Method.FULL_UPLOAD
|
return Method.FULL_UPLOAD
|
||||||
elif s == "download":
|
elif method_str == "download":
|
||||||
return Method.FULL_DOWNLOAD
|
return Method.FULL_DOWNLOAD
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
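The renamed `get_method` above is a plain string-to-enum mapping; for comparison, the same table could be expressed as a dict lookup. This is not part of the commit, just an equivalent sketch that assumes the surrounding module's existing `Method` alias for `SyncServerMethodRequest.Method`:

```
# Equivalent sketch only; Method is the module's existing alias for
# SyncServerMethodRequest.Method.
_METHODS = {
    "hostKey": Method.HOST_KEY,
    "meta": Method.META,
    "start": Method.START,
    "applyGraves": Method.APPLY_GRAVES,
    "applyChanges": Method.APPLY_CHANGES,
    "chunk": Method.CHUNK,
    "applyChunk": Method.APPLY_CHUNK,
    "sanityCheck2": Method.SANITY_CHECK,
    "finish": Method.FINISH,
    "abort": Method.ABORT,
    "upload": Method.FULL_UPLOAD,
    "download": Method.FULL_DOWNLOAD,
}


def get_method(method_str: str):
    return _METHODS.get(method_str)  # None for unknown strings, like the else branch
```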
|
|
@ -170,7 +171,7 @@ def col_path() -> str:
|
||||||
|
|
||||||
|
|
||||||
def serve() -> None:
|
def serve() -> None:
|
||||||
global col
|
global col # pylint: disable=C0103
|
||||||
|
|
||||||
col = Collection(col_path(), server=True)
|
col = Collection(col_path(), server=True)
|
||||||
# don't hold an outer transaction open
|
# don't hold an outer transaction open
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
|
# pylint: enable=invalid-name
|
||||||
|
|
||||||
from anki.syncserver import serve
|
from anki.syncserver import serve
|
||||||
|
|
||||||
serve()
|
serve()
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,7 @@ def getEmptyCol() -> Collection:
|
||||||
|
|
||||||
def test_clock():
|
def test_clock():
|
||||||
col = getEmptyCol()
|
col = getEmptyCol()
|
||||||
if (col.sched.dayCutoff - intTime()) < 10 * 60:
|
if (col.sched.day_cutoff - intTime()) < 10 * 60:
|
||||||
raise Exception("Unit tests will fail around the day rollover.")
|
raise Exception("Unit tests will fail around the day rollover.")
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -33,7 +33,7 @@ def getEmptyCol():
|
||||||
|
|
||||||
def test_clock():
|
def test_clock():
|
||||||
col = getEmptyCol()
|
col = getEmptyCol()
|
||||||
if (col.sched.dayCutoff - intTime()) < 10 * 60:
|
if (col.sched.day_cutoff - intTime()) < 10 * 60:
|
||||||
raise Exception("Unit tests will fail around the day rollover.")
|
raise Exception("Unit tests will fail around the day rollover.")
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1170,12 +1170,12 @@ def test_reorder():
|
||||||
found = False
|
found = False
|
||||||
# 50/50 chance of being reordered
|
# 50/50 chance of being reordered
|
||||||
for i in range(20):
|
for i in range(20):
|
||||||
col.sched.randomizeCards(1)
|
col.sched.randomize_cards(1)
|
||||||
if note.cards()[0].due != note.id:
|
if note.cards()[0].due != note.id:
|
||||||
found = True
|
found = True
|
||||||
break
|
break
|
||||||
assert found
|
assert found
|
||||||
col.sched.orderCards(1)
|
col.sched.order_cards(1)
|
||||||
assert note.cards()[0].due == 1
|
assert note.cards()[0].due == 1
|
||||||
# shifting
|
# shifting
|
||||||
note3 = col.newNote()
|
note3 = col.newNote()
|
||||||
|
|
|
||||||
|
|
@ -74,7 +74,7 @@ class CustomStudy(QDialog):
|
||||||
smin = -DYN_MAX_SIZE
|
smin = -DYN_MAX_SIZE
|
||||||
smax = newExceeding
|
smax = newExceeding
|
||||||
elif idx == RADIO_REV:
|
elif idx == RADIO_REV:
|
||||||
rev = self.mw.col.sched.totalRevForCurrentDeck()
|
rev = self.mw.col.sched.total_rev_for_current_deck()
|
||||||
# get the number of review due in deck that exceed the review due limit
|
# get the number of review due in deck that exceed the review due limit
|
||||||
revUnderLearning = min(
|
revUnderLearning = min(
|
||||||
rev, self.conf["rev"]["perDay"] - self.deck["revToday"][1]
|
rev, self.conf["rev"]["perDay"] - self.deck["revToday"][1]
|
||||||
|
|
@ -126,14 +126,14 @@ class CustomStudy(QDialog):
|
||||||
if i == RADIO_NEW:
|
if i == RADIO_NEW:
|
||||||
self.deck["extendNew"] = spin
|
self.deck["extendNew"] = spin
|
||||||
self.mw.col.decks.save(self.deck)
|
self.mw.col.decks.save(self.deck)
|
||||||
self.mw.col.sched.extendLimits(spin, 0)
|
self.mw.col.sched.extend_limits(spin, 0)
|
||||||
self.mw.reset()
|
self.mw.reset()
|
||||||
QDialog.accept(self)
|
QDialog.accept(self)
|
||||||
return
|
return
|
||||||
elif i == RADIO_REV:
|
elif i == RADIO_REV:
|
||||||
self.deck["extendRev"] = spin
|
self.deck["extendRev"] = spin
|
||||||
self.mw.col.decks.save(self.deck)
|
self.mw.col.decks.save(self.deck)
|
||||||
self.mw.col.sched.extendLimits(0, spin)
|
self.mw.col.sched.extend_limits(0, spin)
|
||||||
self.mw.reset()
|
self.mw.reset()
|
||||||
QDialog.accept(self)
|
QDialog.accept(self)
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -66,7 +66,7 @@ class DeckConf(QDialog):
|
||||||
import anki.consts as cs
|
import anki.consts as cs
|
||||||
|
|
||||||
f = self.form
|
f = self.form
|
||||||
f.newOrder.addItems(list(cs.newCardOrderLabels(self.mw.col).values()))
|
f.newOrder.addItems(list(cs.new_card_order_labels(self.mw.col).values()))
|
||||||
qconnect(f.newOrder.currentIndexChanged, self.onNewOrderChanged)
|
qconnect(f.newOrder.currentIndexChanged, self.onNewOrderChanged)
|
||||||
|
|
||||||
# Conf list
|
# Conf list
|
||||||
|
|
@ -253,7 +253,7 @@ class DeckConf(QDialog):
|
||||||
return
|
return
|
||||||
self.conf["new"]["order"] = new
|
self.conf["new"]["order"] = new
|
||||||
self.mw.progress.start()
|
self.mw.progress.start()
|
||||||
self.mw.col.sched.resortConf(self.conf)
|
self.mw.col.sched.resort_conf(self.conf)
|
||||||
self.mw.progress.finish()
|
self.mw.progress.finish()
|
||||||
|
|
||||||
# Saving
|
# Saving
|
||||||
|
|
@ -294,9 +294,9 @@ class DeckConf(QDialog):
|
||||||
if self._origNewOrder != c["order"]:
|
if self._origNewOrder != c["order"]:
|
||||||
# order of current deck has changed, so have to resort
|
# order of current deck has changed, so have to resort
|
||||||
if c["order"] == NEW_CARDS_RANDOM:
|
if c["order"] == NEW_CARDS_RANDOM:
|
||||||
self.mw.col.sched.randomizeCards(self.deck["id"])
|
self.mw.col.sched.randomize_cards(self.deck["id"])
|
||||||
else:
|
else:
|
||||||
self.mw.col.sched.orderCards(self.deck["id"])
|
self.mw.col.sched.order_cards(self.deck["id"])
|
||||||
# rev
|
# rev
|
||||||
c = self.conf["rev"]
|
c = self.conf["rev"]
|
||||||
c["perDay"] = f.revPerDay.value()
|
c["perDay"] = f.revPerDay.value()
|
||||||
|
|
|
||||||
|
|
@ -266,7 +266,7 @@ class Overview:
|
||||||
else:
|
else:
|
||||||
links.append(["C", "studymore", tr.actions_custom_study()])
|
links.append(["C", "studymore", tr.actions_custom_study()])
|
||||||
# links.append(["F", "cram", _("Filter/Cram")])
|
# links.append(["F", "cram", _("Filter/Cram")])
|
||||||
if self.mw.col.sched.haveBuried():
|
if self.mw.col.sched.have_buried():
|
||||||
links.append(["U", "unbury", tr.studying_unbury()])
|
links.append(["U", "unbury", tr.studying_unbury()])
|
||||||
links.append(["", "description", tr.scheduling_description()])
|
links.append(["", "description", tr.scheduling_description()])
|
||||||
buf = ""
|
buf = ""
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ from typing import Any, cast
|
||||||
import anki.lang
|
import anki.lang
|
||||||
import aqt
|
import aqt
|
||||||
from anki.collection import OpChanges
|
from anki.collection import OpChanges
|
||||||
from anki.consts import newCardSchedulingLabels
|
from anki.consts import new_card_scheduling_labels
|
||||||
from aqt import AnkiQt
|
from aqt import AnkiQt
|
||||||
from aqt.operations.collection import set_preferences
|
from aqt.operations.collection import set_preferences
|
||||||
from aqt.profiles import VideoDriver
|
from aqt.profiles import VideoDriver
|
||||||
|
|
@ -71,7 +71,7 @@ class Preferences(QDialog):
|
||||||
form.sched2021.setVisible(version >= 2)
|
form.sched2021.setVisible(version >= 2)
|
||||||
|
|
||||||
form.lrnCutoff.setValue(int(scheduling.learn_ahead_secs / 60.0))
|
form.lrnCutoff.setValue(int(scheduling.learn_ahead_secs / 60.0))
|
||||||
form.newSpread.addItems(list(newCardSchedulingLabels(self.mw.col).values()))
|
form.newSpread.addItems(list(new_card_scheduling_labels(self.mw.col).values()))
|
||||||
form.newSpread.setCurrentIndex(scheduling.new_review_mix)
|
form.newSpread.setCurrentIndex(scheduling.new_review_mix)
|
||||||
form.dayLearnFirst.setChecked(scheduling.day_learn_first)
|
form.dayLearnFirst.setChecked(scheduling.day_learn_first)
|
||||||
form.dayOffset.setValue(scheduling.rollover)
|
form.dayOffset.setValue(scheduling.rollover)
|
||||||
|
|
|
||||||