PEP8 pylib (#1443)

* PEP8 scheduler/base.py

* PEP8 _backend/__init__.py

* PEP8 _backend/genbackend.py

* PEP8 _backend/genfluent.py

* PEP8 scheduler/__init__.py

* PEP8 __init__.py

* PEP8 _legacy.py

* PEP8 syncserver/__init__.py

- Make 'ip' a good name
- Overrule `global col` being identified as a constant

* PEP8 syncserver/__main__.py

* PEP8 buildinfo.py

* Implement `DeprecatedNamesMixin` for modules

* PEP8 browser.py

* PEP8 config.py

* PEP8 consts.py

* PEP8 db.py

* Format

* Improve AttributeError for DeprecatedNamesMixin

* print the line that imported/referenced the legacy module attr (dae)

* DeprecatedNamesMixinStandalone -> ...ForModule
This commit is contained in:
RumovZ 2021-10-22 12:39:49 +02:00 committed by GitHub
parent 61f3b71664
commit d665dbc9a7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
28 changed files with 231 additions and 146 deletions

View file

@ -57,3 +57,4 @@ good-names =
tr,
db,
ok,
ip,

View file

@ -1,2 +1,4 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from __future__ import annotations
import sys
@ -95,8 +97,8 @@ class RustBackend(RustBackendGenerated):
bytes_input = to_json_bytes(input)
try:
return from_json_bytes(self._backend.db_command(bytes_input))
except Exception as e:
err_bytes = e.args[0]
except Exception as error:
err_bytes = error.args[0]
err = backend_pb2.BackendError()
err.ParseFromString(err_bytes)
raise backend_exception_to_pylib(err)
@ -125,8 +127,8 @@ class RustBackend(RustBackendGenerated):
input_bytes = input.SerializeToString()
try:
return self._backend.command(service, method, input_bytes)
except Exception as e:
err_bytes = bytes(e.args[0])
except Exception as error:
err_bytes = bytes(error.args[0])
err = backend_pb2.BackendError()
err.ParseFromString(err_bytes)
raise backend_exception_to_pylib(err)
@ -135,12 +137,12 @@ class RustBackend(RustBackendGenerated):
def translate_string_in(
module_index: int, message_index: int, **kwargs: str | int | float
) -> i18n_pb2.TranslateStringRequest:
args = {}
for (k, v) in kwargs.items():
if isinstance(v, str):
args[k] = i18n_pb2.TranslateArgValue(str=v)
else:
args[k] = i18n_pb2.TranslateArgValue(number=v)
args = {
k: i18n_pb2.TranslateArgValue(str=v)
if isinstance(v, str)
else i18n_pb2.TranslateArgValue(number=v)
for k, v in kwargs.items()
}
return i18n_pb2.TranslateStringRequest(
module_index=module_index, message_index=message_index, args=args
)

View file

@ -2,6 +2,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
import os
import re
import sys

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
import json
import sys
from typing import List, Literal, TypedDict

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from __future__ import annotations
import functools
@ -33,8 +35,8 @@ def print_deprecation_warning(msg: str, frame: int = 2) -> None:
print(f"{path}:{linenum}:{msg}")
def _print_warning(old: str, doc: str) -> None:
return print_deprecation_warning(f"{old} is deprecated: {doc}", frame=1)
def _print_warning(old: str, doc: str, frame: int = 1) -> None:
return print_deprecation_warning(f"{old} is deprecated: {doc}", frame=frame)
class DeprecatedNamesMixin:
@ -50,20 +52,27 @@ class DeprecatedNamesMixin:
@no_type_check
def __getattr__(self, name: str) -> Any:
if some_tuple := self._deprecated_attributes.get(name):
remapped, replacement = some_tuple
else:
replacement = remapped = self._deprecated_aliases.get(
name
) or stringcase.snakecase(name)
if remapped == name:
raise AttributeError
try:
remapped, replacement = self._get_remapped_and_replacement(name)
out = getattr(self, remapped)
except AttributeError:
raise AttributeError(
f"'{self.__class__.__name__}' object has no attribute '{name}'"
) from None
out = getattr(self, remapped)
_print_warning(f"'{name}'", f"please use '{replacement}'")
return out
@no_type_check
def _get_remapped_and_replacement(self, name: str) -> tuple[str, str]:
if some_tuple := self._deprecated_attributes.get(name):
return some_tuple
remapped = self._deprecated_aliases.get(name) or stringcase.snakecase(name)
if remapped == name:
raise AttributeError
return (remapped, remapped)
@no_type_check
@classmethod
def register_deprecated_aliases(cls, **kwargs: DeprecatedAliasTarget) -> None:
@ -98,6 +107,37 @@ class DeprecatedNamesMixin:
}
class DeprecatedNamesMixinForModule(DeprecatedNamesMixin):
"""Provides the functionality of DeprecatedNamesMixin for modules.
It can be invoked like this:
```
_deprecated_names = DeprecatedNamesMixinForModule(globals())
_deprecated_names.register_deprecated_aliases(...
_deprecated_names.register_deprecated_attributes(...
@no_type_check
def __getattr__(name: str) -> Any:
return _deprecated_names.__getattr__(name)
```
"""
def __init__(self, module_globals: dict[str, Any]) -> None:
self.module_globals = module_globals
def __getattr__(self, name: str) -> Any:
try:
remapped, replacement = self._get_remapped_and_replacement(name)
out = self.module_globals[remapped]
except (AttributeError, KeyError):
raise AttributeError(
f"Module '{self.module_globals['__name__']}' has no attribute '{name}'"
) from None
_print_warning(f"'{name}'", f"please use '{replacement}'", frame=0)
return out
def deprecated(replaced_by: Callable | None = None, info: str = "") -> Callable:
    """Print a deprecation warning, telling users to use `replaced_by`, or show `info`."""

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
class BrowserConfig:
ACTIVE_CARD_COLUMNS_KEY = "activeCols"

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from importlib.resources import open_text
@ -10,8 +12,8 @@ def _get_build_info() -> dict[str, str]:
for line in file.readlines():
elems = line.split()
if len(elems) == 2:
k, v = elems
info[k] = v
key, val = elems
info[key] = val
return info

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
"""
Config handling

View file

@ -1,10 +1,14 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from __future__ import annotations
import sys
from typing import Any, NewType
from typing import Any, NewType, no_type_check
from anki._legacy import DeprecatedNamesMixinForModule
# whether new cards should be mixed with reviews, or shown first or last
NEW_CARDS_DISTRIBUTE = 0
@ -107,7 +111,7 @@ def _tr(col: anki.collection.Collection | None) -> Any:
return tr_legacyglobal
def newCardOrderLabels(col: anki.collection.Collection | None) -> dict[int, Any]:
def new_card_order_labels(col: anki.collection.Collection | None) -> dict[int, Any]:
tr = _tr(col)
return {
0: tr.scheduling_show_new_cards_in_random_order(),
@ -115,7 +119,7 @@ def newCardOrderLabels(col: anki.collection.Collection | None) -> dict[int, Any]
}
def newCardSchedulingLabels(
def new_card_scheduling_labels(
col: anki.collection.Collection | None,
) -> dict[int, Any]:
tr = _tr(col)
@ -124,3 +128,11 @@ def newCardSchedulingLabels(
1: tr.scheduling_show_new_cards_after_reviews(),
2: tr.scheduling_show_new_cards_before_reviews(),
}
_deprecated_names = DeprecatedNamesMixinForModule(globals())
@no_type_check
def __getattr__(name: str) -> Any:
return _deprecated_names.__getattr__(name)

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
"""
A convenience wrapper over pysqlite.
@ -18,29 +20,31 @@ from sqlite3 import Cursor
from sqlite3 import dbapi2 as sqlite
from typing import Any
from anki._legacy import DeprecatedNamesMixin
DBError = sqlite.Error
class DB:
class DB(DeprecatedNamesMixin):
def __init__(self, path: str, timeout: int = 0) -> None:
self._db = sqlite.connect(path, timeout=timeout)
self._db.text_factory = self._textFactory
self._db.text_factory = self._text_factory
self._path = path
self.echo = os.environ.get("DBECHO")
self.mod = False
def __repr__(self) -> str:
d = dict(self.__dict__)
del d["_db"]
return f"{super().__repr__()} {pprint.pformat(d, width=300)}"
dict_ = dict(self.__dict__)
del dict_["_db"]
return f"{super().__repr__()} {pprint.pformat(dict_, width=300)}"
def execute(self, sql: str, *a: Any, **ka: Any) -> Cursor:
s = sql.strip().lower()
canonized = sql.strip().lower()
# mark modified?
for stmt in "insert", "update", "delete":
if s.startswith(stmt):
if canonized.startswith(stmt):
self.mod = True
t = time.time()
start_time = time.time()
if ka:
# execute("...where id = :id", id=5)
res = self._db.execute(sql, ka)
@ -49,25 +53,25 @@ class DB:
res = self._db.execute(sql, a)
if self.echo:
# print a, ka
print(sql, f"{(time.time() - t) * 1000:0.3f}ms")
print(sql, f"{(time.time() - start_time) * 1000:0.3f}ms")
if self.echo == "2":
print(a, ka)
return res
def executemany(self, sql: str, l: Any) -> None:
def executemany(self, sql: str, iterable: Any) -> None:
self.mod = True
t = time.time()
self._db.executemany(sql, l)
start_time = time.time()
self._db.executemany(sql, iterable)
if self.echo:
print(sql, f"{(time.time() - t) * 1000:0.3f}ms")
print(sql, f"{(time.time() - start_time) * 1000:0.3f}ms")
if self.echo == "2":
print(l)
print(iterable)
def commit(self) -> None:
t = time.time()
start_time = time.time()
self._db.commit()
if self.echo:
print(f"commit {(time.time() - t) * 1000:0.3f}ms")
print(f"commit {(time.time() - start_time) * 1000:0.3f}ms")
def executescript(self, sql: str) -> None:
self.mod = True
@ -88,9 +92,9 @@ class DB:
return self.execute(*a, **kw).fetchall()
def first(self, *a: Any, **kw: Any) -> Any:
c = self.execute(*a, **kw)
res = c.fetchone()
c.close()
cursor = self.execute(*a, **kw)
res = cursor.fetchone()
cursor.close()
return res
def list(self, *a: Any, **kw: Any) -> list:
@ -110,20 +114,20 @@ class DB:
def __exit__(self, *args: Any) -> None:
self._db.close()
def totalChanges(self) -> Any:
def total_changes(self) -> Any:
return self._db.total_changes
def interrupt(self) -> None:
self._db.interrupt()
def setAutocommit(self, autocommit: bool) -> None:
def set_autocommit(self, autocommit: bool) -> None:
if autocommit:
self._db.isolation_level = None
else:
self._db.isolation_level = ""
# strip out invalid utf-8 when reading from db
def _textFactory(self, data: bytes) -> str:
def _text_factory(self, data: bytes) -> str:
return str(data, errors="ignore")
def cursor(self, factory: type[Cursor] = Cursor) -> Cursor:

View file

@ -366,7 +366,7 @@ class DeckManager(DeprecatedNamesMixin):
self.update_config(new)
# if it was previously randomized, re-sort
if not old_order:
self.col.sched.resortConf(new)
self.col.sched.resort_conf(new)
# Deck utils
#############################################################

View file

@ -246,7 +246,7 @@ class AnkiExporter(Exporter):
)
else:
# need to reset card state
self.dst.sched.resetCards(cids)
self.dst.sched.reset_cards(cids)
# models - start with zero
self.dst.mod_schema(check=False)
self.dst.models.remove_all_notetypes()

View file

@ -467,7 +467,7 @@ insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)""",
def _postImport(self) -> None:
for did in list(self._decks.values()):
self.col.sched.maybeRandomizeDeck(did)
self.col.sched.maybe_randomize_deck(did)
# make sure new position is correct
self.dst.conf["nextPos"] = (
self.dst.db.scalar("select max(due)+1 from cards where type = 0") or 0

View file

@ -217,7 +217,7 @@ class NoteImporter(Importer):
conf = self.col.decks.config_dict_for_deck_id(did)
# in order due?
if not conf["dyn"] and conf["new"]["order"] == NEW_CARDS_RANDOM:
self.col.sched.randomizeCards(did)
self.col.sched.randomize_cards(did)
part1 = self.col.tr.importing_note_added(count=len(new))
part2 = self.col.tr.importing_note_updated(count=self.updateCount)

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from __future__ import annotations
import sys

View file

@ -1,10 +1,13 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from __future__ import annotations
import anki
from anki import decks_pb2, scheduler_pb2
from anki._legacy import DeprecatedNamesMixin
from anki.collection import OpChanges, OpChangesWithCount, OpChangesWithId
from anki.config import Config
@ -25,7 +28,7 @@ from anki.notes import NoteId
from anki.utils import ids2str, intTime
class SchedulerBase:
class SchedulerBase(DeprecatedNamesMixin):
"Actions shared between schedulers."
version = 0
@ -40,7 +43,7 @@ class SchedulerBase:
return self._timing_today().days_elapsed
@property
def dayCutoff(self) -> int:
def day_cutoff(self) -> int:
return self._timing_today().next_day_at
# Deck list
@ -57,34 +60,34 @@ class SchedulerBase:
def congratulations_info(self) -> CongratsInfo:
return self.col._backend.congrats_info()
def haveBuriedSiblings(self) -> bool:
def have_buried_siblings(self) -> bool:
return self.congratulations_info().have_sched_buried
def haveManuallyBuried(self) -> bool:
def have_manually_buried(self) -> bool:
return self.congratulations_info().have_user_buried
def haveBuried(self) -> bool:
def have_buried(self) -> bool:
info = self.congratulations_info()
return info.have_sched_buried or info.have_user_buried
def extendLimits(self, new: int, rev: int) -> None:
def extend_limits(self, new: int, rev: int) -> None:
did = self.col.decks.current()["id"]
self.col._backend.extend_limits(deck_id=did, new_delta=new, review_delta=rev)
# fixme: used by custom study
def totalRevForCurrentDeck(self) -> int:
def total_rev_for_current_deck(self) -> int:
assert self.col.db
return self.col.db.scalar(
f"""
select count() from cards where id in (
select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? limit 9999)"""
% self._deckLimit(),
% self._deck_limit(),
self.today,
)
# fixme: only used by totalRevForCurrentDeck and old deck stats;
# fixme: only used by total_rev_for_current_deck and old deck stats;
# schedv2 defines separate version
def _deckLimit(self) -> str:
def _deck_limit(self) -> str:
return ids2str(
self.col.decks.deck_and_child_ids(self.col.decks.get_current_id())
)
@ -179,12 +182,12 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
config_key=key, # type: ignore
)
def resetCards(self, ids: list[CardId]) -> None:
def reset_cards(self, ids: list[CardId]) -> None:
"Completely reset cards for export."
sids = ids2str(ids)
assert self.col.db
# we want to avoid resetting due number of existing new cards on export
nonNew = self.col.db.list(
non_new = self.col.db.list(
f"select id from cards where id in %s and (queue != {QUEUE_TYPE_NEW} or type != {CARD_TYPE_NEW})"
% sids
)
@ -194,7 +197,7 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
" where id in %s" % sids
)
# and forget any non-new cards, changing their due numbers
self.col._backend.schedule_cards_as_new(card_ids=nonNew, log=False)
self.col._backend.schedule_cards_as_new(card_ids=non_new, log=False)
# Repositioning new cards
##########################################################################
@ -215,30 +218,29 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
shift_existing=shift_existing,
)
def randomizeCards(self, did: DeckId) -> None:
def randomize_cards(self, did: DeckId) -> None:
self.col._backend.sort_deck(deck_id=did, randomize=True)
def orderCards(self, did: DeckId) -> None:
def order_cards(self, did: DeckId) -> None:
self.col._backend.sort_deck(deck_id=did, randomize=False)
def resortConf(self, conf: DeckConfigDict) -> None:
def resort_conf(self, conf: DeckConfigDict) -> None:
for did in self.col.decks.decks_using_config(conf):
if conf["new"]["order"] == 0:
self.randomizeCards(did)
self.randomize_cards(did)
else:
self.orderCards(did)
self.order_cards(did)
# for post-import
def maybeRandomizeDeck(self, did: DeckId | None = None) -> None:
def maybe_randomize_deck(self, did: DeckId | None = None) -> None:
if not did:
did = self.col.decks.selected()
conf = self.col.decks.config_dict_for_deck_id(did)
# in order due?
if conf["new"]["order"] == NEW_CARDS_RANDOM:
self.randomizeCards(did)
self.randomize_cards(did)
# legacy
def sortCards(
def _legacy_sort_cards(
self,
cids: list[CardId],
start: int = 1,
@ -247,3 +249,8 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
shift: bool = False,
) -> None:
self.reposition_new_cards(cids, start, step, shuffle, shift)
SchedulerBase.register_deprecated_attributes(
sortCards=(SchedulerBase._legacy_sort_cards, SchedulerBase.reposition_new_cards)
)

View file

@ -162,8 +162,8 @@ class Scheduler(V2):
f"""
select sum(left/1000) from (select left from cards where
did in %s and queue = {QUEUE_TYPE_LRN} and due < ? limit %d)"""
% (self._deckLimit(), self.reportLimit),
self.dayCutoff,
% (self._deck_limit(), self.reportLimit),
self.day_cutoff,
)
or 0
)
@ -172,7 +172,7 @@ did in %s and queue = {QUEUE_TYPE_LRN} and due < ? limit %d)"""
f"""
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
and due <= ? limit %d"""
% (self._deckLimit(), self.reportLimit),
% (self._deck_limit(), self.reportLimit),
self.today,
)
@ -193,8 +193,8 @@ and due <= ? limit %d"""
select due, id from cards where
did in %s and queue = {QUEUE_TYPE_LRN} and due < ?
limit %d"""
% (self._deckLimit(), self.reportLimit),
self.dayCutoff,
% (self._deck_limit(), self.reportLimit),
self.day_cutoff,
)
self._lrnQueue = [tuple(e) for e in self._lrnQueue]
# as it arrives sorted by did first, we need to sort it
@ -257,7 +257,7 @@ limit %d"""
delay *= int(random.uniform(1, 1.25))
card.due = int(time.time() + delay)
# due today?
if card.due < self.dayCutoff:
if card.due < self.day_cutoff:
self.lrnCount += card.left // 1000
# if the queue is not empty and there's nothing else to do, make
# sure we don't put it at the head of the queue and end up showing
@ -270,7 +270,7 @@ limit %d"""
else:
# the card is due in one or more days, so we need to use the
# day learn queue
ahead = ((card.due - self.dayCutoff) // 86400) + 1
ahead = ((card.due - self.day_cutoff) // 86400) + 1
card.due = self.today + ahead
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
self._logLrn(card, ease, conf, leaving, type, lastLeft)
@ -513,13 +513,13 @@ did = ? and queue = {QUEUE_TYPE_REV} and due <= ? limit ?""",
card.due = int(delay + time.time())
card.left = self._startingLeft(card)
# queue 1
if card.due < self.dayCutoff:
if card.due < self.day_cutoff:
self.lrnCount += card.left // 1000
card.queue = QUEUE_TYPE_LRN
heappush(self._lrnQueue, (card.due, card.id))
else:
# day learn queue
ahead = ((card.due - self.dayCutoff) // 86400) + 1
ahead = ((card.due - self.day_cutoff) // 86400) + 1
card.due = self.today + ahead
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
return delay
@ -674,8 +674,8 @@ did = ? and queue = {QUEUE_TYPE_REV} and due <= ? limit ?""",
# Deck finished state
##########################################################################
def haveBuried(self) -> bool:
sdids = self._deckLimit()
def have_buried(self) -> bool:
sdids = self._deck_limit()
cnt = self.col.db.scalar(
f"select 1 from cards where queue = {QUEUE_TYPE_SIBLING_BURIED} and did in %s limit 1"
% sdids

View file

@ -68,7 +68,7 @@ class Scheduler(SchedulerBaseWithLegacy):
def _checkDay(self) -> None:
# check if the day has rolled over
if time.time() > self.dayCutoff:
if time.time() > self.day_cutoff:
self.reset()
# Fetching the next card
@ -260,7 +260,7 @@ select count() from
f"""
select count() from cards where id in (
select id from cards where did in %s and queue = {QUEUE_TYPE_NEW} limit ?)"""
% self._deckLimit(),
% self._deck_limit(),
self.reportLimit,
)
@ -286,7 +286,7 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_NEW} limit ?)"""
f"""
select count() from cards where did in %s and queue = {QUEUE_TYPE_LRN}
and due < ?"""
% (self._deckLimit()),
% (self._deck_limit()),
self._lrnCutoff,
)
or 0
@ -296,7 +296,7 @@ and due < ?"""
f"""
select count() from cards where did in %s and queue = {QUEUE_TYPE_DAY_LEARN_RELEARN}
and due <= ?"""
% (self._deckLimit()),
% (self._deck_limit()),
self.today,
)
# previews
@ -304,7 +304,7 @@ and due <= ?"""
f"""
select count() from cards where did in %s and queue = {QUEUE_TYPE_PREVIEW}
"""
% (self._deckLimit())
% (self._deck_limit())
)
def _resetLrn(self) -> None:
@ -326,7 +326,7 @@ select count() from cards where did in %s and queue = {QUEUE_TYPE_PREVIEW}
select due, id from cards where
did in %s and queue in ({QUEUE_TYPE_LRN},{QUEUE_TYPE_PREVIEW}) and due < ?
limit %d"""
% (self._deckLimit(), self.reportLimit),
% (self._deck_limit(), self.reportLimit),
cutoff,
)
self._lrnQueue = [cast(tuple[int, CardId], tuple(e)) for e in self._lrnQueue]
@ -422,7 +422,7 @@ select id from cards where
did in %s and queue = {QUEUE_TYPE_REV} and due <= ?
order by due, random()
limit ?"""
% self._deckLimit(),
% self._deck_limit(),
self.today,
lim,
)
@ -503,7 +503,7 @@ limit ?"""
def _cardConf(self, card: Card) -> DeckConfigDict:
return self.col.decks.config_dict_for_deck_id(card.did)
def _deckLimit(self) -> str:
def _deck_limit(self) -> str:
return ids2str(self.col.decks.active())
# Answering (re)learning cards
@ -609,11 +609,11 @@ limit ?"""
card.due = int(time.time() + delay)
# due today?
if card.due < self.dayCutoff:
if card.due < self.day_cutoff:
# add some randomness, up to 5 minutes or 25%
maxExtra = min(300, int(delay * 0.25))
fuzz = random.randrange(0, max(1, maxExtra))
card.due = min(self.dayCutoff - 1, card.due + fuzz)
card.due = min(self.day_cutoff - 1, card.due + fuzz)
card.queue = QUEUE_TYPE_LRN
if card.due < (intTime() + self.col.conf["collapseTime"]):
self.lrnCount += 1
@ -627,7 +627,7 @@ limit ?"""
else:
# the card is due in one or more days, so we need to use the
# day learn queue
ahead = ((card.due - self.dayCutoff) // 86400) + 1
ahead = ((card.due - self.day_cutoff) // 86400) + 1
card.due = self.today + ahead
card.queue = QUEUE_TYPE_DAY_LEARN_RELEARN
return delay
@ -701,7 +701,7 @@ limit ?"""
ok = 0
for idx, delay in enumerate(delays):
now += int(delay * 60)
if now > self.dayCutoff:
if now > self.day_cutoff:
break
ok = idx
return ok + 1

View file

@ -155,7 +155,7 @@ sum(case when type = {REVLOG_RELRN} then 1 else 0 end), /* relearn */
sum(case when type = {REVLOG_CRAM} then 1 else 0 end) /* filter */
from revlog where id > ? """
+ lim,
(self.col.sched.dayCutoff - 86400) * 1000,
(self.col.sched.day_cutoff - 86400) * 1000,
)
cards = cards or 0
thetime = thetime or 0
@ -189,7 +189,7 @@ from revlog where id > ? """
select count(), sum(case when ease = 1 then 0 else 1 end) from revlog
where lastIvl >= 21 and id > ?"""
+ lim,
(self.col.sched.dayCutoff - 86400) * 1000,
(self.col.sched.day_cutoff - 86400) * 1000,
)
b += "<br>"
if mcnt:
@ -497,7 +497,7 @@ group by day order by day"""
lims = []
if num is not None:
lims.append(
"id > %d" % ((self.col.sched.dayCutoff - (num * chunk * 86400)) * 1000)
"id > %d" % ((self.col.sched.day_cutoff - (num * chunk * 86400)) * 1000)
)
lims.append("did in %s" % self._limit())
if lims:
@ -516,7 +516,7 @@ count(id)
from cards %s
group by day order by day"""
% lim,
self.col.sched.dayCutoff,
self.col.sched.day_cutoff,
chunk,
)
@ -524,7 +524,7 @@ group by day order by day"""
lims = []
if num is not None:
lims.append(
"id > %d" % ((self.col.sched.dayCutoff - (num * chunk * 86400)) * 1000)
"id > %d" % ((self.col.sched.day_cutoff - (num * chunk * 86400)) * 1000)
)
lim = self._revlogLimit()
if lim:
@ -555,7 +555,7 @@ sum(case when type = {REVLOG_CRAM} then time/1000.0 else 0 end)/? -- cram time
from revlog %s
group by day order by day"""
% lim,
self.col.sched.dayCutoff,
self.col.sched.day_cutoff,
chunk,
tf,
tf,
@ -568,7 +568,9 @@ group by day order by day"""
lims = []
num = self._periodDays()
if num:
lims.append("id > %d" % ((self.col.sched.dayCutoff - (num * 86400)) * 1000))
lims.append(
"id > %d" % ((self.col.sched.day_cutoff - (num * 86400)) * 1000)
)
rlim = self._revlogLimit()
if rlim:
lims.append(rlim)
@ -583,7 +585,7 @@ select count(), abs(min(day)) from (select
from revlog %s
group by day order by day)"""
% lim,
self.col.sched.dayCutoff,
self.col.sched.day_cutoff,
)
assert ret
return ret
@ -742,7 +744,7 @@ select count(), avg(ivl), max(ivl) from cards where did in %s and queue = {QUEUE
days = self._periodDays()
if days is not None:
lims.append(
"id > %d" % ((self.col.sched.dayCutoff - (days * 86400)) * 1000)
"id > %d" % ((self.col.sched.day_cutoff - (days * 86400)) * 1000)
)
if lims:
lim = "where " + " and ".join(lims)
@ -845,7 +847,7 @@ order by thetype, ease"""
rolloverHour = self.col.conf.get("rollover", 4)
pd = self._periodDays()
if pd:
lim += " and id > %d" % ((self.col.sched.dayCutoff - (86400 * pd)) * 1000)
lim += " and id > %d" % ((self.col.sched.day_cutoff - (86400 * pd)) * 1000)
return self.col.db.all(
f"""
select
@ -856,7 +858,7 @@ count()
from revlog where type in ({REVLOG_LRN},{REVLOG_REV},{REVLOG_RELRN}) %s
group by hour having count() > 30 order by hour"""
% lim,
self.col.sched.dayCutoff - (rolloverHour * 3600),
self.col.sched.day_cutoff - (rolloverHour * 3600),
)
# Cards
@ -1079,7 +1081,7 @@ $(function () {
def _limit(self) -> Any:
if self.wholeCollection:
return ids2str([d["id"] for d in self.col.decks.all()])
return self.col.sched._deckLimit()
return self.col.sched._deck_limit()
def _revlogLimit(self) -> str:
if self.wholeCollection:
@ -1106,7 +1108,7 @@ $(function () {
if not t:
period = 1
else:
period = max(1, int(1 + ((self.col.sched.dayCutoff - (t / 1000)) / 86400)))
period = max(1, int(1 + ((self.col.sched.day_cutoff - (t / 1000)) / 86400)))
return period
def _periodDays(self) -> int | None:

View file

@ -4,6 +4,8 @@
# Please see /docs/syncserver.md
#
# pylint: enable=invalid-name
from __future__ import annotations
import gzip
@ -19,8 +21,8 @@ from typing import Iterable, Optional
try:
import flask
from waitress.server import create_server
except ImportError as e:
print(e, "- to use the server, 'pip install anki[syncserver]'")
except ImportError as error:
print(error, "- to use the server, 'pip install anki[syncserver]'")
sys.exit(1)
@ -72,10 +74,10 @@ def handle_sync_request(method_str: str) -> Response:
col.close_for_full_sync()
try:
outdata = col._backend.sync_server_method(method=method, data=data)
except Exception as e:
except Exception as error:
if method == Method.META:
# if parallel syncing requests come in, block them
print("exception in meta", e)
print("exception in meta", error)
return flask.make_response("Conflict", 409)
else:
raise
@ -91,8 +93,8 @@ def handle_sync_request(method_str: str) -> Response:
path = outdata.decode("utf8")
def stream_reply() -> Iterable[bytes]:
with open(path, "rb") as f:
while chunk := f.read(16 * 1024):
with open(path, "rb") as file:
while chunk := file.read(16 * 1024):
yield chunk
os.unlink(path)
@ -117,30 +119,29 @@ def after_full_sync() -> None:
def get_method(
method_str: str,
) -> SyncServerMethodRequest.Method.V | None: # pylint: disable=no-member
s = method_str
if s == "hostKey":
if method_str == "hostKey":
return Method.HOST_KEY
elif s == "meta":
elif method_str == "meta":
return Method.META
elif s == "start":
elif method_str == "start":
return Method.START
elif s == "applyGraves":
elif method_str == "applyGraves":
return Method.APPLY_GRAVES
elif s == "applyChanges":
elif method_str == "applyChanges":
return Method.APPLY_CHANGES
elif s == "chunk":
elif method_str == "chunk":
return Method.CHUNK
elif s == "applyChunk":
elif method_str == "applyChunk":
return Method.APPLY_CHUNK
elif s == "sanityCheck2":
elif method_str == "sanityCheck2":
return Method.SANITY_CHECK
elif s == "finish":
elif method_str == "finish":
return Method.FINISH
elif s == "abort":
elif method_str == "abort":
return Method.ABORT
elif s == "upload":
elif method_str == "upload":
return Method.FULL_UPLOAD
elif s == "download":
elif method_str == "download":
return Method.FULL_DOWNLOAD
else:
return None
@ -170,7 +171,7 @@ def col_path() -> str:
def serve() -> None:
global col
global col # pylint: disable=C0103
col = Collection(col_path(), server=True)
# don't hold an outer transaction open

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
# pylint: enable=invalid-name
from anki.syncserver import serve
serve()

View file

@ -21,7 +21,7 @@ def getEmptyCol() -> Collection:
def test_clock():
col = getEmptyCol()
if (col.sched.dayCutoff - intTime()) < 10 * 60:
if (col.sched.day_cutoff - intTime()) < 10 * 60:
raise Exception("Unit tests will fail around the day rollover.")

View file

@ -33,7 +33,7 @@ def getEmptyCol():
def test_clock():
col = getEmptyCol()
if (col.sched.dayCutoff - intTime()) < 10 * 60:
if (col.sched.day_cutoff - intTime()) < 10 * 60:
raise Exception("Unit tests will fail around the day rollover.")
@ -1170,12 +1170,12 @@ def test_reorder():
found = False
# 50/50 chance of being reordered
for i in range(20):
col.sched.randomizeCards(1)
col.sched.randomize_cards(1)
if note.cards()[0].due != note.id:
found = True
break
assert found
col.sched.orderCards(1)
col.sched.order_cards(1)
assert note.cards()[0].due == 1
# shifting
note3 = col.newNote()

View file

@ -74,7 +74,7 @@ class CustomStudy(QDialog):
smin = -DYN_MAX_SIZE
smax = newExceeding
elif idx == RADIO_REV:
rev = self.mw.col.sched.totalRevForCurrentDeck()
rev = self.mw.col.sched.total_rev_for_current_deck()
# get the number of review due in deck that exceed the review due limit
revUnderLearning = min(
rev, self.conf["rev"]["perDay"] - self.deck["revToday"][1]
@ -126,14 +126,14 @@ class CustomStudy(QDialog):
if i == RADIO_NEW:
self.deck["extendNew"] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extendLimits(spin, 0)
self.mw.col.sched.extend_limits(spin, 0)
self.mw.reset()
QDialog.accept(self)
return
elif i == RADIO_REV:
self.deck["extendRev"] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extendLimits(0, spin)
self.mw.col.sched.extend_limits(0, spin)
self.mw.reset()
QDialog.accept(self)
return

View file

@ -66,7 +66,7 @@ class DeckConf(QDialog):
import anki.consts as cs
f = self.form
f.newOrder.addItems(list(cs.newCardOrderLabels(self.mw.col).values()))
f.newOrder.addItems(list(cs.new_card_order_labels(self.mw.col).values()))
qconnect(f.newOrder.currentIndexChanged, self.onNewOrderChanged)
# Conf list
@ -253,7 +253,7 @@ class DeckConf(QDialog):
return
self.conf["new"]["order"] = new
self.mw.progress.start()
self.mw.col.sched.resortConf(self.conf)
self.mw.col.sched.resort_conf(self.conf)
self.mw.progress.finish()
# Saving
@ -294,9 +294,9 @@ class DeckConf(QDialog):
if self._origNewOrder != c["order"]:
# order of current deck has changed, so have to resort
if c["order"] == NEW_CARDS_RANDOM:
self.mw.col.sched.randomizeCards(self.deck["id"])
self.mw.col.sched.randomize_cards(self.deck["id"])
else:
self.mw.col.sched.orderCards(self.deck["id"])
self.mw.col.sched.order_cards(self.deck["id"])
# rev
c = self.conf["rev"]
c["perDay"] = f.revPerDay.value()

View file

@ -266,7 +266,7 @@ class Overview:
else:
links.append(["C", "studymore", tr.actions_custom_study()])
# links.append(["F", "cram", _("Filter/Cram")])
if self.mw.col.sched.haveBuried():
if self.mw.col.sched.have_buried():
links.append(["U", "unbury", tr.studying_unbury()])
links.append(["", "description", tr.scheduling_description()])
buf = ""

View file

@ -6,7 +6,7 @@ from typing import Any, cast
import anki.lang
import aqt
from anki.collection import OpChanges
from anki.consts import newCardSchedulingLabels
from anki.consts import new_card_scheduling_labels
from aqt import AnkiQt
from aqt.operations.collection import set_preferences
from aqt.profiles import VideoDriver
@ -71,7 +71,7 @@ class Preferences(QDialog):
form.sched2021.setVisible(version >= 2)
form.lrnCutoff.setValue(int(scheduling.learn_ahead_secs / 60.0))
form.newSpread.addItems(list(newCardSchedulingLabels(self.mw.col).values()))
form.newSpread.addItems(list(new_card_scheduling_labels(self.mw.col).values()))
form.newSpread.setCurrentIndex(scheduling.new_review_mix)
form.dayLearnFirst.setChecked(scheduling.day_learn_first)
form.dayOffset.setValue(scheduling.rollover)