Merge branch 'master' into add/dockerfile

This commit is contained in:
Jakub Kaczmarzyk 2020-09-08 14:59:32 -04:00 committed by GitHub
commit d808415a9e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
59 changed files with 1269 additions and 743 deletions

View file

@ -13,6 +13,16 @@ licensed under the BSD 3 clause license. If any pull request you make contains
code that you don't own the copyright to, you agree to make that clear when code that you don't own the copyright to, you agree to make that clear when
submitting the request. submitting the request.
When submitting a pull request, GitHub Actions will check that the Git email you
are submitting from matches the one you used to edit this file. A common issue
is adding yourself to this file using the username on your computer, but then
using GitHub to rebase or edit a pull request online. This will result in your
Git email becoming something like user@noreply.github.com. To prevent the
automatic check from failing, you can edit this file again using GitHub's online
editor, making a trivial edit like adding a space after your name, and then pull
requests will work regardless of whether you create them using your computer or
GitHub's online interface.
For users who previously confirmed the license of their contributions on the For users who previously confirmed the license of their contributions on the
support site, it would be great if you could add your name below as well. support site, it would be great if you could add your name below as well.
@ -48,6 +58,7 @@ abdo <github.com/ANH25>
aplaice <plaice.adam+github@gmail.com> aplaice <plaice.adam+github@gmail.com>
phwoo <github.com/phwoo> phwoo <github.com/phwoo>
Soren Bjornstad <anki@sorenbjornstad.com> Soren Bjornstad <anki@sorenbjornstad.com>
Aleksa Sarai <cyphar@cyphar.com>
Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com> Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>
******************** ********************

View file

@ -26,7 +26,8 @@ $ pyenv/bin/python -c 'import aqt; aqt.run()'
Building from source Building from source
-------------------- --------------------
To start, make sure you have the following installed: You will need the following dependencies. Some OS-specific tips on installing
them are further down this document.
- Python 3.7+ - Python 3.7+
- portaudio - portaudio
@ -44,14 +45,14 @@ To start, make sure you have the following installed:
- git - git
- curl - curl
The build scripts assume a UNIX-like environment, so on Windows you will
need to use WSL or Cygwin to use them.
Once you've installed the above components, execute ./run in this repo, Once you've installed the above components, execute ./run in this repo,
which will build the subcomponents, and start Anki. Any arguments included which will build the subcomponents, and start Anki. Any arguments included
on the command line will be passed on to Anki. The first run will take on the command line will be passed on to Anki. The first run will take
quite a while to download and build everything - please be patient. quite a while to download and build everything - please be patient.
Don't name the Git checkout ~/Anki or ~/Documents/Anki, as those folders
were used on old Anki versions and will be automatically moved.
Before contributing code, please read README.contributing. Before contributing code, please read README.contributing.
If you'd like to contribute translations, please see the translations section If you'd like to contribute translations, please see the translations section
@ -67,6 +68,10 @@ Subcomponents
- proto contains the interface used to communicate between different - proto contains the interface used to communicate between different
languages. languages.
The pyenv folder is created when running make for the first time.
It is a Python virtual environment that contains Anki's libraries
and all the required dependencies.
Makefile Makefile
-------------- --------------
@ -99,7 +104,7 @@ Install Python 3.7+ if it's not installed.
Install other dependencies: Install other dependencies:
``` ```
sudo apt install portaudio19-dev mpv lame npm rsync gcc gettext git curl sudo apt install portaudio19-dev mpv lame npm rsync gcc gettext git curl python3-dev python3-venv libxcb-xinerama0
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
source $HOME/.cargo/env source $HOME/.cargo/env
wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip wget https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-linux-x86_64.zip
@ -120,6 +125,9 @@ $ brew link gettext --force
Windows users (using Visual Studio) Windows users (using Visual Studio)
---------- ----------
The build scripts assume a UNIX-like environment, so on Windows you will
need to use WSL or Cygwin to use them.
User-contributed instructions for building using Cygwin: User-contributed instructions for building using Cygwin:
1. Download and install Cygwin and put its `/bin/` directory on your system path (This PC > Properties > Advancded system settings > Environment Variables > double-click Path > New). 1. Download and install Cygwin and put its `/bin/` directory on your system path (This PC > Properties > Advancded system settings > Environment Variables > double-click Path > New).

View file

@ -95,7 +95,8 @@ service BackendService {
rpc LocalMinutesWest (Int64) returns (Int32); rpc LocalMinutesWest (Int64) returns (Int32);
rpc SetLocalMinutesWest (Int32) returns (Empty); rpc SetLocalMinutesWest (Int32) returns (Empty);
rpc SchedTimingToday (Empty) returns (SchedTimingTodayOut); rpc SchedTimingToday (Empty) returns (SchedTimingTodayOut);
rpc StudiedToday (StudiedTodayIn) returns (String); rpc StudiedToday (Empty) returns (String);
rpc StudiedTodayMessage (StudiedTodayMessageIn) returns (String);
rpc UpdateStats (UpdateStatsIn) returns (Empty); rpc UpdateStats (UpdateStatsIn) returns (Empty);
rpc ExtendLimits (ExtendLimitsIn) returns (Empty); rpc ExtendLimits (ExtendLimitsIn) returns (Empty);
rpc CountsForDeckToday (DeckID) returns (CountsForDeckTodayOut); rpc CountsForDeckToday (DeckID) returns (CountsForDeckTodayOut);
@ -103,6 +104,12 @@ service BackendService {
rpc RestoreBuriedAndSuspendedCards (CardIDs) returns (Empty); rpc RestoreBuriedAndSuspendedCards (CardIDs) returns (Empty);
rpc UnburyCardsInCurrentDeck (UnburyCardsInCurrentDeckIn) returns (Empty); rpc UnburyCardsInCurrentDeck (UnburyCardsInCurrentDeckIn) returns (Empty);
rpc BuryOrSuspendCards (BuryOrSuspendCardsIn) returns (Empty); rpc BuryOrSuspendCards (BuryOrSuspendCardsIn) returns (Empty);
rpc EmptyFilteredDeck (DeckID) returns (Empty);
rpc RebuildFilteredDeck (DeckID) returns (UInt32);
rpc ScheduleCardsAsReviews (ScheduleCardsAsReviewsIn) returns (Empty);
rpc ScheduleCardsAsNew (CardIDs) returns (Empty);
rpc SortCards (SortCardsIn) returns (Empty);
rpc SortDeck (SortDeckIn) returns (Empty);
// stats // stats
@ -143,6 +150,7 @@ service BackendService {
rpc UpdateCard (Card) returns (Empty); rpc UpdateCard (Card) returns (Empty);
rpc AddCard (Card) returns (CardID); rpc AddCard (Card) returns (CardID);
rpc RemoveCards (RemoveCardsIn) returns (Empty); rpc RemoveCards (RemoveCardsIn) returns (Empty);
rpc SetDeck (SetDeckIn) returns (Empty);
// notes // notes
@ -682,7 +690,7 @@ message FormatTimespanIn {
Context context = 2; Context context = 2;
} }
message StudiedTodayIn { message StudiedTodayMessageIn {
uint32 cards = 1; uint32 cards = 1;
double seconds = 2; double seconds = 2;
} }
@ -1013,6 +1021,7 @@ message RevlogEntry {
REVIEW = 1; REVIEW = 1;
RELEARNING = 2; RELEARNING = 2;
EARLY_REVIEW = 3; EARLY_REVIEW = 3;
MANUAL = 4;
} }
int64 id = 1; int64 id = 1;
int64 cid = 2; int64 cid = 2;
@ -1054,3 +1063,27 @@ message BuryOrSuspendCardsIn {
repeated int64 card_ids = 1; repeated int64 card_ids = 1;
Mode mode = 2; Mode mode = 2;
} }
message ScheduleCardsAsReviewsIn {
repeated int64 card_ids = 1;
uint32 min_interval = 2;
uint32 max_interval = 3;
}
message SortCardsIn {
repeated int64 card_ids = 1;
uint32 starting_from = 2;
uint32 step_size = 3;
bool randomize = 4;
bool shift_existing = 5;
}
message SortDeckIn {
int64 deck_id = 1;
bool randomize = 2;
}
message SetDeckIn {
repeated int64 card_ids = 1;
int64 deck_id = 2;
}

View file

@ -384,6 +384,9 @@ class Collection:
"You probably want .remove_notes_by_card() instead." "You probably want .remove_notes_by_card() instead."
self.backend.remove_cards(card_ids=card_ids) self.backend.remove_cards(card_ids=card_ids)
def set_deck(self, card_ids: List[int], deck_id: int) -> None:
self.backend.set_deck(card_ids=card_ids, deck_id=deck_id)
# legacy # legacy
def remCards(self, ids: List[int], notes: bool = True) -> None: def remCards(self, ids: List[int], notes: bool = True) -> None:
@ -516,6 +519,9 @@ table.review-log {{ {revlog_style} }}
return style + self.backend.card_stats(card_id) return style + self.backend.card_stats(card_id)
def studied_today(self) -> str:
return self.backend.studied_today()
# legacy # legacy
def cardStats(self, card: Card) -> str: def cardStats(self, card: Card) -> str:

View file

@ -556,7 +556,7 @@ class DeckManager:
# Dynamic decks # Dynamic decks
########################################################################## ##########################################################################
def newDyn(self, name: str) -> int: def new_filtered(self, name: str) -> int:
"Return a new dynamic deck and set it as the current deck." "Return a new dynamic deck and set it as the current deck."
did = self.id(name, type=1) did = self.id(name, type=1)
self.select(did) self.select(did)
@ -565,3 +565,7 @@ class DeckManager:
# 1 for dyn, 0 for standard # 1 for dyn, 0 for standard
def isDyn(self, did: Union[int, str]) -> int: def isDyn(self, did: Union[int, str]) -> int:
return self.get(did)["dyn"] return self.get(did)["dyn"]
# legacy
newDyn = new_filtered

View file

@ -6,7 +6,7 @@ from __future__ import annotations
import random import random
import time import time
from heapq import * from heapq import *
from typing import Any, List, Optional, Sequence, Tuple, Union from typing import Any, List, Optional, Tuple, Union
import anki import anki
from anki import hooks from anki import hooks
@ -599,77 +599,9 @@ did = ? and queue = {QUEUE_TYPE_REV} and due <= ? limit ?""",
idealIvl = self._fuzzedIvl(idealIvl) idealIvl = self._fuzzedIvl(idealIvl)
return idealIvl return idealIvl
# Dynamic deck handling # Filtered deck handling
########################################################################## ##########################################################################
def rebuildDyn(self, did: Optional[int] = None) -> Optional[Sequence[int]]: # type: ignore[override]
"Rebuild a dynamic deck."
did = did or self.col.decks.selected()
deck = self.col.decks.get(did)
assert deck["dyn"]
# move any existing cards back first, then fill
self.emptyDyn(did)
ids = self._fillDyn(deck)
if not ids:
return None
# and change to our new deck
self.col.decks.select(did)
return ids
def _fillDyn(self, deck: Deck) -> Sequence[int]: # type: ignore[override]
search, limit, order = deck["terms"][0]
orderlimit = self._dynOrder(order, limit)
if search.strip():
search = "(%s)" % search
search = "%s -is:suspended -is:buried -deck:filtered -is:learn" % search
try:
ids = self.col.findCards(search, order=orderlimit)
except:
ids = []
return ids
# move the cards over
self.col.log(deck["id"], ids)
self._moveToDyn(deck["id"], ids)
return ids
def emptyDyn(self, did: Optional[int], lim: Optional[str] = None) -> None:
if not lim:
lim = "did = %s" % did
self.col.log(self.col.db.list("select id from cards where %s" % lim))
# move out of cram queue
self.col.db.execute(
f"""
update cards set did = odid, queue = (case when type = {CARD_TYPE_LRN} then {QUEUE_TYPE_NEW}
else type end), type = (case when type = {CARD_TYPE_LRN} then {CARD_TYPE_NEW} else type end),
due = odue, odue = 0, odid = 0, usn = ? where %s"""
% lim,
self.col.usn(),
)
def _moveToDyn(self, did: int, ids: Sequence[int]) -> None: # type: ignore[override]
deck = self.col.decks.get(did)
data = []
t = intTime()
u = self.col.usn()
for c, id in enumerate(ids):
# start at -100000 so that reviews are all due
data.append((did, -100000 + c, u, id))
# due reviews stay in the review queue. careful: can't use
# "odid or did", as sqlite converts to boolean
queue = f"""
(case when type={CARD_TYPE_REV} and (case when odue then odue <= %d else due <= %d end)
then {QUEUE_TYPE_REV} else {QUEUE_TYPE_NEW} end)"""
queue %= (self.today, self.today)
self.col.db.executemany(
"""
update cards set
odid = (case when odid then odid else did end),
odue = (case when odue then odue else due end),
did = ?, queue = %s, due = ?, usn = ? where id = ?"""
% queue,
data,
)
def _dynIvlBoost(self, card: Card) -> int: def _dynIvlBoost(self, card: Card) -> int:
assert card.odid and card.type == CARD_TYPE_REV assert card.odid and card.type == CARD_TYPE_REV
assert card.factor assert card.factor

View file

@ -15,7 +15,6 @@ from typing import (
List, List,
Optional, Optional,
Sequence, Sequence,
Set,
Tuple, Tuple,
Union, Union,
) )
@ -25,7 +24,7 @@ import anki.backend_pb2 as pb
from anki import hooks from anki import hooks
from anki.cards import Card from anki.cards import Card
from anki.consts import * from anki.consts import *
from anki.decks import Deck, DeckConfig, DeckManager, FilteredDeck, QueueConfig from anki.decks import Deck, DeckConfig, DeckManager, QueueConfig
from anki.lang import _ from anki.lang import _
from anki.notes import Note from anki.notes import Note
from anki.rsbackend import ( from anki.rsbackend import (
@ -1062,117 +1061,14 @@ select id from cards where did in %s and queue = {QUEUE_TYPE_REV} and due <= ? l
return ivl return ivl
# Dynamic deck handling # Filtered deck handling
########################################################################## ##########################################################################
_restoreQueueWhenEmptyingSnippet = f""" def rebuild_filtered_deck(self, deck_id: int) -> int:
queue = (case when queue < 0 then queue return self.col.backend.rebuild_filtered_deck(deck_id)
when type in (1,{CARD_TYPE_RELEARNING}) then
(case when (case when odue then odue else due end) > 1000000000 then 1 else
{QUEUE_TYPE_DAY_LEARN_RELEARN} end)
else
type
end)
"""
def rebuildDyn(self, did: Optional[int] = None) -> Optional[int]: def empty_filtered_deck(self, deck_id: int) -> None:
"Rebuild a dynamic deck." self.col.backend.empty_filtered_deck(deck_id)
did = did or self.col.decks.selected()
deck = self.col.decks.get(did)
assert deck["dyn"]
# move any existing cards back first, then fill
self.emptyDyn(did)
cnt = self._fillDyn(deck)
if not cnt:
return None
# and change to our new deck
self.col.decks.select(did)
return cnt
def _fillDyn(self, deck: FilteredDeck) -> int:
start = -100000
total = 0
for search, limit, order in deck["terms"]:
orderlimit = self._dynOrder(order, limit)
if search.strip():
search = "(%s)" % search
search = "%s -is:suspended -is:buried -deck:filtered" % search
try:
ids = self.col.findCards(search, order=orderlimit)
except:
return total
# move the cards over
self.col.log(deck["id"], ids)
self._moveToDyn(deck["id"], ids, start=start + total)
total += len(ids)
return total
def emptyDyn(self, did: Optional[int], lim: Optional[str] = None) -> None:
if not lim:
lim = "did = %s" % did
self.col.log(self.col.db.list("select id from cards where %s" % lim))
self.col.db.execute(
"""
update cards set did = odid, %s,
due = (case when odue>0 then odue else due end), odue = 0, odid = 0, usn = ? where %s"""
% (self._restoreQueueWhenEmptyingSnippet, lim),
self.col.usn(),
)
def remFromDyn(self, cids: List[int]) -> None:
self.emptyDyn(None, "id in %s and odid" % ids2str(cids))
def _dynOrder(self, o: int, l: int) -> str:
if o == DYN_OLDEST:
t = "(select max(id) from revlog where cid=c.id)"
elif o == DYN_RANDOM:
t = "random()"
elif o == DYN_SMALLINT:
t = "ivl"
elif o == DYN_BIGINT:
t = "ivl desc"
elif o == DYN_LAPSES:
t = "lapses desc"
elif o == DYN_ADDED:
t = "n.id"
elif o == DYN_REVADDED:
t = "n.id desc"
elif o == DYN_DUEPRIORITY:
t = (
f"(case when queue={QUEUE_TYPE_REV} and due <= %d then (ivl / cast(%d-due+0.001 as real)) else 100000+due end)"
% (self.today, self.today)
)
else: # DYN_DUE or unknown
t = "c.due, c.ord"
return t + " limit %d" % l
def _moveToDyn(self, did: int, ids: Sequence[int], start: int = -100000) -> None:
deck = self.col.decks.get(did)
data = []
u = self.col.usn()
due = start
for id in ids:
data.append((did, due, u, id))
due += 1
queue = ""
if not deck["resched"]:
queue = f",queue={QUEUE_TYPE_REV}"
query = (
"""
update cards set
odid = did, odue = due,
did = ?,
due = (case when due <= 0 then due else ? end),
usn = ?
%s
where id = ?
"""
% queue
)
self.col.db.executemany(query, data)
def _removeFromFiltered(self, card: Card) -> None: def _removeFromFiltered(self, card: Card) -> None:
if card.odid: if card.odid:
@ -1195,6 +1091,42 @@ where id = ?
else: else:
card.queue = card.type card.queue = card.type
# legacy
def rebuildDyn(self, did: Optional[int] = None) -> Optional[int]:
did = did or self.col.decks.selected()
count = self.rebuild_filtered_deck(did) or None
if not count:
return None
# and change to our new deck
self.col.decks.select(did)
return count
def emptyDyn(self, did: Optional[int], lim: Optional[str] = None) -> None:
if lim is None:
self.empty_filtered_deck(did)
return
queue = f"""
queue = (case when queue < 0 then queue
when type in (1,{CARD_TYPE_RELEARNING}) then
(case when (case when odue then odue else due end) > 1000000000 then 1 else
{QUEUE_TYPE_DAY_LEARN_RELEARN} end)
else
type
end)
"""
self.col.db.execute(
"""
update cards set did = odid, %s,
due = (case when odue>0 then odue else due end), odue = 0, odid = 0, usn = ? where %s"""
% (queue, lim),
self.col.usn(),
)
def remFromDyn(self, cids: List[int]) -> None:
self.emptyDyn(None, "id in %s and odid" % ids2str(cids))
# Leeches # Leeches
########################################################################## ##########################################################################
@ -1474,47 +1406,17 @@ and (queue={QUEUE_TYPE_NEW} or (queue={QUEUE_TYPE_REV} and due<=?))""",
# Resetting # Resetting
########################################################################## ##########################################################################
def forgetCards(self, ids: List[int]) -> None: def schedule_cards_as_new(self, card_ids: List[int]) -> None:
"Put cards at the end of the new queue." "Put cards at the end of the new queue."
self.remFromDyn(ids) self.col.backend.schedule_cards_as_new(card_ids)
self.col.db.execute(
f"update cards set type={CARD_TYPE_NEW},queue={QUEUE_TYPE_NEW},ivl=0,due=0,odue=0,factor=?"
" where id in " + ids2str(ids),
STARTING_FACTOR,
)
pmax = (
self.col.db.scalar(f"select max(due) from cards where type={CARD_TYPE_NEW}")
or 0
)
# takes care of mod + usn
self.sortCards(ids, start=pmax + 1)
self.col.log(ids)
def reschedCards(self, ids: List[int], imin: int, imax: int) -> None: def schedule_cards_as_reviews(
"Put cards in review queue with a new interval in days (min, max)." self, card_ids: List[int], min_interval: int, max_interval: int
d = [] ) -> None:
t = self.today "Make cards review cards, with a new interval randomly selected from range."
mod = intTime() self.col.backend.schedule_cards_as_reviews(
for id in ids: card_ids=card_ids, min_interval=min_interval, max_interval=max_interval
r = random.randint(imin, imax)
d.append(
(
max(1, r),
r + t,
self.col.usn(),
mod,
STARTING_FACTOR,
id,
)
)
self.remFromDyn(ids)
self.col.db.executemany(
f"""
update cards set type={CARD_TYPE_REV},queue={QUEUE_TYPE_REV},ivl=?,due=?,odue=0,
usn=?,mod=?,factor=? where id=?""",
d,
) )
self.col.log(ids)
def resetCards(self, ids: List[int]) -> None: def resetCards(self, ids: List[int]) -> None:
"Completely reset cards for export." "Completely reset cards for export."
@ -1533,6 +1435,11 @@ usn=?,mod=?,factor=? where id=?""",
self.forgetCards(nonNew) self.forgetCards(nonNew)
self.col.log(ids) self.col.log(ids)
# legacy
forgetCards = schedule_cards_as_new
reschedCards = schedule_cards_as_reviews
# Repositioning new cards # Repositioning new cards
########################################################################## ##########################################################################
@ -1544,60 +1451,19 @@ usn=?,mod=?,factor=? where id=?""",
shuffle: bool = False, shuffle: bool = False,
shift: bool = False, shift: bool = False,
) -> None: ) -> None:
scids = ids2str(cids) self.col.backend.sort_cards(
now = intTime() card_ids=cids,
nids = [] starting_from=start,
nidsSet: Set[int] = set() step_size=step,
for id in cids: randomize=shuffle,
nid = self.col.db.scalar("select nid from cards where id = ?", id) shift_existing=shift,
if nid not in nidsSet: )
nids.append(nid)
nidsSet.add(nid)
if not nids:
# no new cards
return
# determine nid ordering
due = {}
if shuffle:
random.shuffle(nids)
for c, nid in enumerate(nids):
due[nid] = start + c * step
# pylint: disable=undefined-loop-variable
high = start + c * step
# shift?
if shift:
low = self.col.db.scalar(
f"select min(due) from cards where due >= ? and type = {CARD_TYPE_NEW} "
"and id not in %s" % scids,
start,
)
if low is not None:
shiftby = high - low + 1
self.col.db.execute(
f"""
update cards set mod=?, usn=?, due=due+? where id not in %s
and due >= ? and queue = {QUEUE_TYPE_NEW}"""
% scids,
now,
self.col.usn(),
shiftby,
low,
)
# reorder cards
d = []
for id, nid in self.col.db.execute(
f"select id, nid from cards where type = {CARD_TYPE_NEW} and id in " + scids
):
d.append((due[nid], now, self.col.usn(), id))
self.col.db.executemany("update cards set due=?,mod=?,usn=? where id = ?", d)
def randomizeCards(self, did: int) -> None: def randomizeCards(self, did: int) -> None:
cids = self.col.db.list("select id from cards where did = ?", did) self.col.backend.sort_deck(deck_id=did, randomize=True)
self.sortCards(cids, shuffle=True)
def orderCards(self, did: int) -> None: def orderCards(self, did: int) -> None:
cids = self.col.db.list("select id from cards where did = ? order by nid", did) self.col.backend.sort_deck(deck_id=did, randomize=False)
self.sortCards(cids)
def resortConf(self, conf) -> None: def resortConf(self, conf) -> None:
for did in self.col.decks.didsForConf(conf): for did in self.col.decks.didsForConf(conf):

View file

@ -145,7 +145,9 @@ from revlog where id > ? """
return "<b>" + str(s) + "</b>" return "<b>" + str(s) + "</b>"
if cards: if cards:
b += self.col.backend.studied_today(cards=cards, seconds=float(thetime)) b += self.col.backend.studied_today_message(
cards=cards, seconds=float(thetime)
)
# again/pass count # again/pass count
b += "<br>" + _("Again count: %s") % bold(failed) b += "<br>" + _("Again count: %s") % bold(failed)
if cards: if cards:

View file

@ -84,7 +84,7 @@ def test_rename():
for n in "yo", "yo::two", "yo::two::three": for n in "yo", "yo::two", "yo::two::three":
assert n in names assert n in names
# over filtered # over filtered
filteredId = col.decks.newDyn("filtered") filteredId = col.decks.new_filtered("filtered")
filtered = col.decks.get(filteredId) filtered = col.decks.get(filteredId)
childId = col.decks.id("child") childId = col.decks.id("child")
child = col.decks.get(childId) child = col.decks.get(childId)

View file

@ -545,8 +545,8 @@ def test_suspend():
# should cope with cards in cram decks # should cope with cards in cram decks
c.due = 1 c.due = 1
c.flush() c.flush()
col.decks.newDyn("tmp") did = col.decks.new_filtered("tmp")
col.sched.rebuildDyn() col.sched.rebuild_filtered_deck(did)
c.load() c.load()
assert c.due != 1 assert c.due != 1
assert c.did != 1 assert c.did != 1
@ -575,8 +575,8 @@ def test_cram():
assert col.sched.counts() == (0, 0, 0) assert col.sched.counts() == (0, 0, 0)
cardcopy = copy.copy(c) cardcopy = copy.copy(c)
# create a dynamic deck and refresh it # create a dynamic deck and refresh it
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
# should appear as new in the deck list # should appear as new in the deck list
assert sorted(col.sched.deck_due_tree().children)[0].new_count == 1 assert sorted(col.sched.deck_due_tree().children)[0].new_count == 1
@ -616,7 +616,7 @@ def test_cram():
# and it will have moved back to the previous deck # and it will have moved back to the previous deck
assert c.did == 1 assert c.did == 1
# cram the deck again # cram the deck again
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
# check ivls again - passing should be idempotent # check ivls again - passing should be idempotent
@ -646,8 +646,8 @@ def test_cram():
col.reset() col.reset()
assert col.sched.counts() == (0, 0, 1) assert col.sched.counts() == (0, 0, 1)
# cram again # cram again
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
assert col.sched.counts() == (0, 0, 1) assert col.sched.counts() == (0, 0, 1)
c.load() c.load()
@ -673,8 +673,8 @@ def test_cram_rem():
note["Front"] = "one" note["Front"] = "one"
col.addNote(note) col.addNote(note)
oldDue = note.cards()[0].due oldDue = note.cards()[0].due
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 2) col.sched.answerCard(c, 2)
@ -682,7 +682,7 @@ def test_cram_rem():
assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN
assert c.due != oldDue assert c.due != oldDue
# if we terminate cramming prematurely it should be set back to new # if we terminate cramming prematurely it should be set back to new
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.type == CARD_TYPE_NEW and c.queue == QUEUE_TYPE_NEW assert c.type == CARD_TYPE_NEW and c.queue == QUEUE_TYPE_NEW
assert c.due == oldDue assert c.due == oldDue
@ -695,11 +695,11 @@ def test_cram_resched():
note["Front"] = "one" note["Front"] = "one"
col.addNote(note) col.addNote(note)
# cram deck # cram deck
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
cram = col.decks.get(did) cram = col.decks.get(did)
cram["resched"] = False cram["resched"] = False
col.decks.save(cram) col.decks.save(cram)
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
# graduate should return it to new # graduate should return it to new
c = col.sched.getCard() c = col.sched.getCard()
@ -718,7 +718,7 @@ def test_cram_resched():
c.factor = STARTING_FACTOR c.factor = STARTING_FACTOR
c.flush() c.flush()
cardcopy = copy.copy(c) cardcopy = copy.copy(c)
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
assert ni(c, 1) == 600 assert ni(c, 1) == 600
@ -730,23 +730,23 @@ def test_cram_resched():
# check failure too # check failure too
c = cardcopy c = cardcopy
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 1) col.sched.answerCard(c, 1)
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.ivl == 100 assert c.ivl == 100
assert c.due == col.sched.today + 25 assert c.due == col.sched.today + 25
# fail+grad early # fail+grad early
c = cardcopy c = cardcopy
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 1) col.sched.answerCard(c, 1)
col.sched.answerCard(c, 3) col.sched.answerCard(c, 3)
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.ivl == 100 assert c.ivl == 100
assert c.due == col.sched.today + 25 assert c.due == col.sched.today + 25
@ -754,11 +754,11 @@ def test_cram_resched():
c = cardcopy c = cardcopy
c.due = -25 c.due = -25
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 3) col.sched.answerCard(c, 3)
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.ivl == 100 assert c.ivl == 100
assert c.due == -25 assert c.due == -25
@ -766,11 +766,11 @@ def test_cram_resched():
c = cardcopy c = cardcopy
c.due = -25 c.due = -25
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 1) col.sched.answerCard(c, 1)
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.ivl == 100 assert c.ivl == 100
assert c.due == -25 assert c.due == -25
@ -778,7 +778,7 @@ def test_cram_resched():
c = cardcopy c = cardcopy
c.due = -25 c.due = -25
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
col.sched.answerCard(c, 1) col.sched.answerCard(c, 1)
@ -789,7 +789,7 @@ def test_cram_resched():
# lapsed card pulled into cram # lapsed card pulled into cram
# col.sched._cardConf(c)['lapse']['mult']=0.5 # col.sched._cardConf(c)['lapse']['mult']=0.5
# col.sched.answerCard(c, 1) # col.sched.answerCard(c, 1)
# col.sched.rebuildDyn(did) # col.sched.rebuild_filtered_deck(did)
# col.reset() # col.reset()
# c = col.sched.getCard() # c = col.sched.getCard()
# col.sched.answerCard(c, 2) # col.sched.answerCard(c, 2)

View file

@ -668,8 +668,8 @@ def test_suspend():
# should cope with cards in cram decks # should cope with cards in cram decks
c.due = 1 c.due = 1
c.flush() c.flush()
col.decks.newDyn("tmp") did = col.decks.new_filtered("tmp")
col.sched.rebuildDyn() col.sched.rebuild_filtered_deck(did)
c.load() c.load()
assert c.due != 1 assert c.due != 1
assert c.did != 1 assert c.did != 1
@ -698,8 +698,8 @@ def test_filt_reviewing_early_normal():
col.reset() col.reset()
assert col.sched.counts() == (0, 0, 0) assert col.sched.counts() == (0, 0, 0)
# create a dynamic deck and refresh it # create a dynamic deck and refresh it
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
# should appear as normal in the deck list # should appear as normal in the deck list
assert sorted(col.sched.deck_due_tree().children)[0].review_count == 1 assert sorted(col.sched.deck_due_tree().children)[0].review_count == 1
@ -727,7 +727,7 @@ def test_filt_reviewing_early_normal():
c.ivl = 100 c.ivl = 100
c.due = col.sched.today + 75 c.due = col.sched.today + 75
c.flush() c.flush()
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
c = col.sched.getCard() c = col.sched.getCard()
@ -758,8 +758,8 @@ def test_filt_keep_lrn_state():
assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN
# create a dynamic deck and refresh it # create a dynamic deck and refresh it
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
# card should still be in learning state # card should still be in learning state
@ -773,7 +773,7 @@ def test_filt_keep_lrn_state():
assert c.due - intTime() > 60 * 60 assert c.due - intTime() > 60 * 60
# emptying the deck preserves learning state # emptying the deck preserves learning state
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN assert c.type == CARD_TYPE_LRN and c.queue == QUEUE_TYPE_LRN
assert c.left == 1001 assert c.left == 1001
@ -792,11 +792,11 @@ def test_preview():
note2["Front"] = "two" note2["Front"] = "two"
col.addNote(note2) col.addNote(note2)
# cram deck # cram deck
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
cram = col.decks.get(did) cram = col.decks.get(did)
cram["resched"] = False cram["resched"] = False
col.decks.save(cram) col.decks.save(cram)
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.reset() col.reset()
# grab the first card # grab the first card
c = col.sched.getCard() c = col.sched.getCard()
@ -823,7 +823,7 @@ def test_preview():
assert c.id == orig.id assert c.id == orig.id
# emptying the filtered deck should restore card # emptying the filtered deck should restore card
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
c.load() c.load()
assert c.queue == QUEUE_TYPE_NEW assert c.queue == QUEUE_TYPE_NEW
assert c.reps == 0 assert c.reps == 0
@ -1253,9 +1253,9 @@ def test_negativeDueFilter():
c.flush() c.flush()
# into and out of filtered deck # into and out of filtered deck
did = col.decks.newDyn("Cram") did = col.decks.new_filtered("Cram")
col.sched.rebuildDyn(did) col.sched.rebuild_filtered_deck(did)
col.sched.emptyDyn(did) col.sched.empty_filtered_deck(did)
col.reset() col.reset()
c.load() c.load()

View file

@ -22,7 +22,7 @@ from anki.models import NoteType
from anki.notes import Note from anki.notes import Note
from anki.rsbackend import TR, DeckTreeNode, InvalidInput from anki.rsbackend import TR, DeckTreeNode, InvalidInput
from anki.stats import CardStats from anki.stats import CardStats
from anki.utils import htmlToTextLine, ids2str, intTime, isMac, isWin from anki.utils import htmlToTextLine, ids2str, isMac, isWin
from aqt import AnkiQt, gui_hooks from aqt import AnkiQt, gui_hooks
from aqt.editor import Editor from aqt.editor import Editor
from aqt.exporting import ExportDialog from aqt.exporting import ExportDialog
@ -1601,21 +1601,7 @@ where id in %s"""
return return
self.model.beginReset() self.model.beginReset()
self.mw.checkpoint(_("Change Deck")) self.mw.checkpoint(_("Change Deck"))
mod = intTime() self.col.set_deck(cids, did)
usn = self.col.usn()
# normal cards
scids = ids2str(cids)
# remove any cards from filtered deck first
self.col.sched.remFromDyn(cids)
# then move into new deck
self.col.db.execute(
"""
update cards set usn=?, mod=?, did=? where id in """
+ scids,
usn,
mod,
did,
)
self.model.endReset() self.model.endReset()
self.mw.requireReset(reason=ResetReason.BrowserSetDeck, context=self) self.mw.requireReset(reason=ResetReason.BrowserSetDeck, context=self)

View file

@ -145,12 +145,12 @@ class CustomStudy(QDialog):
return QDialog.accept(self) return QDialog.accept(self)
else: else:
# safe to empty # safe to empty
self.mw.col.sched.emptyDyn(cur["id"]) self.mw.col.sched.empty_filtered_deck(cur["id"])
# reuse; don't delete as it may have children # reuse; don't delete as it may have children
dyn = cur dyn = cur
self.mw.col.decks.select(cur["id"]) self.mw.col.decks.select(cur["id"])
else: else:
did = self.mw.col.decks.newDyn(_("Custom Study Session")) did = self.mw.col.decks.new_filtered(_("Custom Study Session"))
dyn = self.mw.col.decks.get(did) dyn = self.mw.col.decks.get(did)
# and then set various options # and then set various options
if i == RADIO_FORGOT: if i == RADIO_FORGOT:
@ -186,7 +186,7 @@ class CustomStudy(QDialog):
self.mw.col.decks.save(dyn) self.mw.col.decks.save(dyn)
# generate cards # generate cards
self.created_custom_study = True self.created_custom_study = True
if not self.mw.col.sched.rebuildDyn(): if not self.mw.col.sched.rebuild_filtered_deck(dyn["id"]):
return showWarning(_("No cards matched the criteria you provided.")) return showWarning(_("No cards matched the criteria you provided."))
self.mw.moveToState("overview") self.mw.moveToState("overview")
QDialog.accept(self) QDialog.accept(self)

View file

@ -138,16 +138,7 @@ class DeckBrowser:
self.web.eval("$(function() { window.scrollTo(0, %d, 'instant'); });" % offset) self.web.eval("$(function() { window.scrollTo(0, %d, 'instant'); });" % offset)
def _renderStats(self): def _renderStats(self):
cards, thetime = self.mw.col.db.first( return self.mw.col.studied_today()
"""
select count(), sum(time)/1000 from revlog
where id > ?""",
(self.mw.col.sched.dayCutoff - 86400) * 1000,
)
cards = cards or 0
thetime = thetime or 0
buf = self.mw.col.backend.studied_today(cards=cards, seconds=float(thetime))
return buf
def _renderDeckTree(self, top: DeckTreeNode) -> str: def _renderDeckTree(self, top: DeckTreeNode) -> str:
buf = """ buf = """

View file

@ -122,7 +122,7 @@ class DeckConf(QDialog):
def accept(self): def accept(self):
if not self.saveConf(): if not self.saveConf():
return return
if not self.mw.col.sched.rebuildDyn(): if not self.mw.col.sched.rebuild_filtered_deck(self.deck["id"]):
if askUser( if askUser(
_( _(
"""\ """\

View file

@ -493,7 +493,9 @@ class Editor:
self.web.eval("setBackgrounds(%s);" % json.dumps(cols)) self.web.eval("setBackgrounds(%s);" % json.dumps(cols))
def showDupes(self): def showDupes(self):
contents = html.escape(stripHTMLMedia(self.note.fields[0])) contents = html.escape(
stripHTMLMedia(self.note.fields[0]), quote=False
).replace('"', r"\"")
browser = aqt.dialogs.open("Browser", self.mw) browser = aqt.dialogs.open("Browser", self.mw)
browser.form.searchEdit.lineEdit().setText( browser.form.searchEdit.lineEdit().setText(
'"dupe:%s,%s"' % (self.note.model()["id"], contents) '"dupe:%s,%s"' % (self.note.model()["id"], contents)
@ -743,7 +745,6 @@ to a cloze type first, via 'Notes>Change Note Type'"""
) )
return return
if file: if file:
av_player.play_file(file)
self.addMedia(file) self.addMedia(file)
# Media downloads # Media downloads
@ -761,7 +762,8 @@ to a cloze type first, via 'Notes>Change Note Type'"""
name = urllib.parse.quote(fname.encode("utf8")) name = urllib.parse.quote(fname.encode("utf8"))
return '<img src="%s">' % name return '<img src="%s">' % name
else: else:
return "[sound:%s]" % fname av_player.play_file(fname)
return "[sound:%s]" % html.escape(fname, quote=False)
def urlToFile(self, url: str) -> Optional[str]: def urlToFile(self, url: str) -> Optional[str]:
l = url.lower() l = url.lower()

View file

@ -1159,7 +1159,7 @@ title="%s" %s>%s</button>""" % (
while self.col.decks.id_for_name(_("Filtered Deck %d") % n): while self.col.decks.id_for_name(_("Filtered Deck %d") % n):
n += 1 n += 1
name = _("Filtered Deck %d") % n name = _("Filtered Deck %d") % n
did = self.col.decks.newDyn(name) did = self.col.decks.new_filtered(name)
diag = aqt.dyndeckconf.DeckConf(self, first=True, search=search) diag = aqt.dyndeckconf.DeckConf(self, first=True, search=search)
if not diag.ok: if not diag.ok:
# user cancelled first config # user cancelled first config

View file

@ -76,10 +76,10 @@ class Overview:
deck = self.mw.col.decks.current() deck = self.mw.col.decks.current()
self.mw.onCram("'deck:%s'" % deck["name"]) self.mw.onCram("'deck:%s'" % deck["name"])
elif url == "refresh": elif url == "refresh":
self.mw.col.sched.rebuildDyn() self.mw.col.sched.rebuild_filtered_deck(self.mw.col.decks.selected())
self.mw.reset() self.mw.reset()
elif url == "empty": elif url == "empty":
self.mw.col.sched.emptyDyn(self.mw.col.decks.selected()) self.mw.col.sched.empty_filtered_deck(self.mw.col.decks.selected())
self.mw.reset() self.mw.reset()
elif url == "decks": elif url == "decks":
self.mw.moveToState("deckBrowser") self.mw.moveToState("deckBrowser")
@ -107,12 +107,12 @@ class Overview:
def onRebuildKey(self): def onRebuildKey(self):
if self._filteredDeck(): if self._filteredDeck():
self.mw.col.sched.rebuildDyn() self.mw.col.sched.rebuild_filtered_deck(self.mw.col.decks.selected())
self.mw.reset() self.mw.reset()
def onEmptyKey(self): def onEmptyKey(self):
if self._filteredDeck(): if self._filteredDeck():
self.mw.col.sched.emptyDyn(self.mw.col.decks.selected()) self.mw.col.sched.empty_filtered_deck(self.mw.col.decks.selected())
self.mw.reset() self.mw.reset()
def onCustomStudyKey(self): def onCustomStudyKey(self):

View file

@ -28,7 +28,7 @@ serde = "1.0.114"
serde_json = "1.0.56" serde_json = "1.0.56"
tokio = { version = "0.2.21", features = ["fs", "rt-threaded"] } tokio = { version = "0.2.21", features = ["fs", "rt-threaded"] }
serde_derive = "1.0.114" serde_derive = "1.0.114"
zip = "0.5.6" zip = { version = "0.5.6", default-features = false, features = ["deflate", "time"] }
serde_tuple = "0.5.0" serde_tuple = "0.5.0"
coarsetime = { git = "https://github.com/ankitects/rust-coarsetime.git", branch="old-mac-compat" } coarsetime = { git = "https://github.com/ankitects/rust-coarsetime.git", branch="old-mac-compat" }
utime = "0.3.1" utime = "0.3.1"
@ -52,6 +52,8 @@ pin-project = "0.4.22"
async-compression = { version = "0.3.5", features = ["stream", "gzip"] } async-compression = { version = "0.3.5", features = ["stream", "gzip"] }
askama = "0.10.1" askama = "0.10.1"
hyper = "0.13.7" hyper = "0.13.7"
once_cell = "1.4.1"
scopeguard = "1.1.0"
[target.'cfg(target_vendor="apple")'.dependencies.rusqlite] [target.'cfg(target_vendor="apple")'.dependencies.rusqlite]
version = "0.23.1" version = "0.23.1"

View file

@ -1,6 +1,7 @@
use std::fmt::Write; use std::fmt::Write;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::process::Command;
use fluent_syntax::ast::{Entry::Message, ResourceEntry}; use fluent_syntax::ast::{Entry::Message, ResourceEntry};
use fluent_syntax::parser::parse; use fluent_syntax::parser::parse;
@ -115,7 +116,7 @@ fn write_method_trait(buf: &mut String, service: &prost_build::Service) {
use prost::Message; use prost::Message;
pub type BackendResult<T> = std::result::Result<T, crate::err::AnkiError>; pub type BackendResult<T> = std::result::Result<T, crate::err::AnkiError>;
pub trait BackendService { pub trait BackendService {
fn run_command_bytes2_inner(&mut self, method: u32, input: &[u8]) -> std::result::Result<Vec<u8>, crate::err::AnkiError> { fn run_command_bytes2_inner(&self, method: u32, input: &[u8]) -> std::result::Result<Vec<u8>, crate::err::AnkiError> {
match method { match method {
"#, "#,
); );
@ -145,7 +146,7 @@ pub trait BackendService {
write!( write!(
buf, buf,
concat!( concat!(
" fn {method_name}(&mut self, input: {input_type}) -> ", " fn {method_name}(&self, input: {input_type}) -> ",
"BackendResult<{output_type}>;\n" "BackendResult<{output_type}>;\n"
), ),
method_name = method.name, method_name = method.name,
@ -200,15 +201,20 @@ fn main() -> std::io::Result<()> {
fs::write(rust_string_path, rust_string_vec(&idents))?; fs::write(rust_string_path, rust_string_vec(&idents))?;
// output protobuf generated code // output protobuf generated code
// we avoid default OUT_DIR for now, as it breaks code completion
std::env::set_var("OUT_DIR", "src");
println!("cargo:rerun-if-changed=../proto/backend.proto"); println!("cargo:rerun-if-changed=../proto/backend.proto");
let mut config = prost_build::Config::new(); let mut config = prost_build::Config::new();
config.service_generator(service_generator());
config config
// we avoid default OUT_DIR for now, as it breaks code completion
.out_dir("src")
.service_generator(service_generator())
.compile_protos(&["../proto/backend.proto"], &["../proto"]) .compile_protos(&["../proto/backend.proto"], &["../proto"])
.unwrap(); .unwrap();
// rustfmt the protobuf code
let rustfmt = Command::new("rustfmt")
.arg(Path::new("src/backend_proto.rs"))
.status()
.unwrap();
assert!(rustfmt.success(), "rustfmt backend_proto.rs failed");
// write the other language ftl files // write the other language ftl files
let mut ftl_lang_dirs = vec!["./ftl/repo/core".to_string()]; let mut ftl_lang_dirs = vec!["./ftl/repo/core".to_string()];

View file

@ -21,3 +21,5 @@ card-stats-review-log-type-learn = Learn
card-stats-review-log-type-review = Review card-stats-review-log-type-review = Review
card-stats-review-log-type-relearn = Relearn card-stats-review-log-type-relearn = Relearn
card-stats-review-log-type-filtered = Filtered card-stats-review-log-type-filtered = Filtered
card-stats-review-log-type-manual = Manual

View file

@ -1 +1 @@
nightly-2020-06-25 stable

View file

@ -1 +0,0 @@
ignore = ["backend_proto.rs"]

View file

@ -31,8 +31,9 @@ use crate::{
RenderCardOutput, RenderCardOutput,
}, },
sched::cutoff::local_minutes_west_for_stamp, sched::cutoff::local_minutes_west_for_stamp,
sched::timespan::{answer_button_time, studied_today, time_span}, sched::timespan::{answer_button_time, time_span},
search::SortMode, search::SortMode,
stats::studied_today,
sync::{ sync::{
get_remote_sync_meta, sync_abort, sync_login, FullSyncProgress, NormalSyncProgress, get_remote_sync_meta, sync_abort, sync_login, FullSyncProgress, NormalSyncProgress,
SyncActionRequired, SyncAuth, SyncMeta, SyncOutput, SyncStage, SyncActionRequired, SyncAuth, SyncMeta, SyncOutput, SyncStage,
@ -43,11 +44,13 @@ use crate::{
types::Usn, types::Usn,
}; };
use fluent::FluentValue; use fluent::FluentValue;
use futures::future::{AbortHandle, Abortable}; use futures::future::{AbortHandle, AbortRegistration, Abortable};
use log::error; use log::error;
use once_cell::sync::OnceCell;
use pb::{sync_status_out, BackendService}; use pb::{sync_status_out, BackendService};
use prost::Message; use prost::Message;
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use slog::warn;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::convert::TryFrom; use std::convert::TryFrom;
use std::{ use std::{
@ -84,13 +87,16 @@ struct ProgressState {
last_progress: Option<Progress>, last_progress: Option<Progress>,
} }
// fixme: this should support multiple abort handles.
type AbortHandleSlot = Arc<Mutex<Option<AbortHandle>>>;
pub struct Backend { pub struct Backend {
col: Arc<Mutex<Option<Collection>>>, col: Arc<Mutex<Option<Collection>>>,
i18n: I18n, i18n: I18n,
server: bool, server: bool,
sync_abort: Option<AbortHandle>, sync_abort: AbortHandleSlot,
progress_state: Arc<Mutex<ProgressState>>, progress_state: Arc<Mutex<ProgressState>>,
runtime: Option<Runtime>, runtime: OnceCell<Runtime>,
state: Arc<Mutex<BackendState>>, state: Arc<Mutex<BackendState>>,
} }
@ -266,12 +272,12 @@ impl From<pb::DeckConfigId> for DeckConfID {
} }
impl BackendService for Backend { impl BackendService for Backend {
fn latest_progress(&mut self, _input: Empty) -> BackendResult<pb::Progress> { fn latest_progress(&self, _input: Empty) -> BackendResult<pb::Progress> {
let progress = self.progress_state.lock().unwrap().last_progress; let progress = self.progress_state.lock().unwrap().last_progress;
Ok(progress_to_proto(progress, &self.i18n)) Ok(progress_to_proto(progress, &self.i18n))
} }
fn set_wants_abort(&mut self, _input: Empty) -> BackendResult<Empty> { fn set_wants_abort(&self, _input: Empty) -> BackendResult<Empty> {
self.progress_state.lock().unwrap().want_abort = true; self.progress_state.lock().unwrap().want_abort = true;
Ok(().into()) Ok(().into())
} }
@ -279,7 +285,7 @@ impl BackendService for Backend {
// card rendering // card rendering
fn render_existing_card( fn render_existing_card(
&mut self, &self,
input: pb::RenderExistingCardIn, input: pb::RenderExistingCardIn,
) -> BackendResult<pb::RenderCardOut> { ) -> BackendResult<pb::RenderCardOut> {
self.with_col(|col| { self.with_col(|col| {
@ -289,7 +295,7 @@ impl BackendService for Backend {
} }
fn render_uncommitted_card( fn render_uncommitted_card(
&mut self, &self,
input: pb::RenderUncommittedCardIn, input: pb::RenderUncommittedCardIn,
) -> BackendResult<pb::RenderCardOut> { ) -> BackendResult<pb::RenderCardOut> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?; let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
@ -306,7 +312,7 @@ impl BackendService for Backend {
}) })
} }
fn get_empty_cards(&mut self, _input: pb::Empty) -> Result<pb::EmptyCardsReport> { fn get_empty_cards(&self, _input: pb::Empty) -> Result<pb::EmptyCardsReport> {
self.with_col(|col| { self.with_col(|col| {
let mut empty = col.empty_cards()?; let mut empty = col.empty_cards()?;
let report = col.empty_cards_report(&mut empty)?; let report = col.empty_cards_report(&mut empty)?;
@ -326,16 +332,13 @@ impl BackendService for Backend {
}) })
} }
fn strip_av_tags(&mut self, input: pb::String) -> BackendResult<pb::String> { fn strip_av_tags(&self, input: pb::String) -> BackendResult<pb::String> {
Ok(pb::String { Ok(pb::String {
val: strip_av_tags(&input.val).into(), val: strip_av_tags(&input.val).into(),
}) })
} }
fn extract_av_tags( fn extract_av_tags(&self, input: pb::ExtractAvTagsIn) -> BackendResult<pb::ExtractAvTagsOut> {
&mut self,
input: pb::ExtractAvTagsIn,
) -> BackendResult<pb::ExtractAvTagsOut> {
let (text, tags) = extract_av_tags(&input.text, input.question_side); let (text, tags) = extract_av_tags(&input.text, input.question_side);
let pt_tags = tags let pt_tags = tags
.into_iter() .into_iter()
@ -367,7 +370,7 @@ impl BackendService for Backend {
}) })
} }
fn extract_latex(&mut self, input: pb::ExtractLatexIn) -> BackendResult<pb::ExtractLatexOut> { fn extract_latex(&self, input: pb::ExtractLatexIn) -> BackendResult<pb::ExtractLatexOut> {
let func = if input.expand_clozes { let func = if input.expand_clozes {
extract_latex_expanding_clozes extract_latex_expanding_clozes
} else { } else {
@ -390,7 +393,7 @@ impl BackendService for Backend {
// searching // searching
//----------------------------------------------- //-----------------------------------------------
fn search_cards(&mut self, input: pb::SearchCardsIn) -> Result<pb::SearchCardsOut> { fn search_cards(&self, input: pb::SearchCardsIn) -> Result<pb::SearchCardsOut> {
self.with_col(|col| { self.with_col(|col| {
let order = if let Some(order) = input.order { let order = if let Some(order) = input.order {
use pb::sort_order::Value as V; use pb::sort_order::Value as V;
@ -414,7 +417,7 @@ impl BackendService for Backend {
}) })
} }
fn search_notes(&mut self, input: pb::SearchNotesIn) -> Result<pb::SearchNotesOut> { fn search_notes(&self, input: pb::SearchNotesIn) -> Result<pb::SearchNotesOut> {
self.with_col(|col| { self.with_col(|col| {
let nids = col.search_notes(&input.search)?; let nids = col.search_notes(&input.search)?;
Ok(pb::SearchNotesOut { Ok(pb::SearchNotesOut {
@ -423,7 +426,7 @@ impl BackendService for Backend {
}) })
} }
fn find_and_replace(&mut self, input: pb::FindAndReplaceIn) -> BackendResult<pb::UInt32> { fn find_and_replace(&self, input: pb::FindAndReplaceIn) -> BackendResult<pb::UInt32> {
let mut search = if input.regex { let mut search = if input.regex {
input.search input.search
} else { } else {
@ -450,7 +453,7 @@ impl BackendService for Backend {
/// This behaves like _updateCutoff() in older code - it also unburies at the start of /// This behaves like _updateCutoff() in older code - it also unburies at the start of
/// a new day. /// a new day.
fn sched_timing_today(&mut self, _input: pb::Empty) -> Result<pb::SchedTimingTodayOut> { fn sched_timing_today(&self, _input: pb::Empty) -> Result<pb::SchedTimingTodayOut> {
self.with_col(|col| { self.with_col(|col| {
let timing = col.timing_today()?; let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?; col.unbury_if_day_rolled_over(timing)?;
@ -458,13 +461,13 @@ impl BackendService for Backend {
}) })
} }
fn local_minutes_west(&mut self, input: pb::Int64) -> BackendResult<pb::Int32> { fn local_minutes_west(&self, input: pb::Int64) -> BackendResult<pb::Int32> {
Ok(pb::Int32 { Ok(pb::Int32 {
val: local_minutes_west_for_stamp(input.val), val: local_minutes_west_for_stamp(input.val),
}) })
} }
fn set_local_minutes_west(&mut self, input: pb::Int32) -> BackendResult<Empty> { fn set_local_minutes_west(&self, input: pb::Int32) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
col.set_local_mins_west(input.val).map(Into::into) col.set_local_mins_west(input.val).map(Into::into)
@ -472,11 +475,17 @@ impl BackendService for Backend {
}) })
} }
fn studied_today(&mut self, input: pb::StudiedTodayIn) -> BackendResult<pb::String> { /// Fetch data from DB and return rendered string.
Ok(studied_today(input.cards as usize, input.seconds as f32, &self.i18n).into()) fn studied_today(&self, _input: pb::Empty) -> BackendResult<pb::String> {
self.with_col(|col| col.studied_today().map(Into::into))
} }
fn update_stats(&mut self, input: pb::UpdateStatsIn) -> BackendResult<Empty> { /// Message rendering only, for old graphs.
fn studied_today_message(&self, input: pb::StudiedTodayMessageIn) -> BackendResult<pb::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.i18n).into())
}
fn update_stats(&self, input: pb::UpdateStatsIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
let today = col.current_due_day(0)?; let today = col.current_due_day(0)?;
@ -486,7 +495,7 @@ impl BackendService for Backend {
}) })
} }
fn extend_limits(&mut self, input: pb::ExtendLimitsIn) -> BackendResult<Empty> { fn extend_limits(&self, input: pb::ExtendLimitsIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
let today = col.current_due_day(0)?; let today = col.current_due_day(0)?;
@ -503,18 +512,15 @@ impl BackendService for Backend {
}) })
} }
fn counts_for_deck_today( fn counts_for_deck_today(&self, input: pb::DeckId) -> BackendResult<pb::CountsForDeckTodayOut> {
&mut self,
input: pb::DeckId,
) -> BackendResult<pb::CountsForDeckTodayOut> {
self.with_col(|col| col.counts_for_deck_today(input.did.into())) self.with_col(|col| col.counts_for_deck_today(input.did.into()))
} }
fn congrats_info(&mut self, _input: Empty) -> BackendResult<pb::CongratsInfoOut> { fn congrats_info(&self, _input: Empty) -> BackendResult<pb::CongratsInfoOut> {
self.with_col(|col| col.congrats_info()) self.with_col(|col| col.congrats_info())
} }
fn restore_buried_and_suspended_cards(&mut self, input: pb::CardIDs) -> BackendResult<Empty> { fn restore_buried_and_suspended_cards(&self, input: pb::CardIDs) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.unbury_or_unsuspend_cards(&input.into_native()) col.unbury_or_unsuspend_cards(&input.into_native())
.map(Into::into) .map(Into::into)
@ -522,7 +528,7 @@ impl BackendService for Backend {
} }
fn unbury_cards_in_current_deck( fn unbury_cards_in_current_deck(
&mut self, &self,
input: pb::UnburyCardsInCurrentDeckIn, input: pb::UnburyCardsInCurrentDeckIn,
) -> BackendResult<Empty> { ) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
@ -531,7 +537,7 @@ impl BackendService for Backend {
}) })
} }
fn bury_or_suspend_cards(&mut self, input: pb::BuryOrSuspendCardsIn) -> BackendResult<Empty> { fn bury_or_suspend_cards(&self, input: pb::BuryOrSuspendCardsIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
let mode = input.mode(); let mode = input.mode();
let cids: Vec<_> = input.card_ids.into_iter().map(CardID).collect(); let cids: Vec<_> = input.card_ids.into_iter().map(CardID).collect();
@ -539,22 +545,70 @@ impl BackendService for Backend {
}) })
} }
fn empty_filtered_deck(&self, input: pb::DeckId) -> BackendResult<Empty> {
self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into))
}
fn rebuild_filtered_deck(&self, input: pb::DeckId) -> BackendResult<pb::UInt32> {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
}
fn schedule_cards_as_reviews(
&self,
input: pb::ScheduleCardsAsReviewsIn,
) -> BackendResult<Empty> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardID).collect();
let (min, max) = (input.min_interval, input.max_interval);
self.with_col(|col| {
col.reschedule_cards_as_reviews(&cids, min, max)
.map(Into::into)
})
}
fn schedule_cards_as_new(&self, input: pb::CardIDs) -> BackendResult<Empty> {
self.with_col(|col| {
col.reschedule_cards_as_new(&input.into_native())
.map(Into::into)
})
}
fn sort_cards(&self, input: pb::SortCardsIn) -> BackendResult<Empty> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardID).collect();
let (start, step, random, shift) = (
input.starting_from,
input.step_size,
input.randomize,
input.shift_existing,
);
self.with_col(|col| {
col.sort_cards(&cids, start, step, random, shift)
.map(Into::into)
})
}
fn sort_deck(&self, input: pb::SortDeckIn) -> BackendResult<Empty> {
self.with_col(|col| {
col.sort_deck(input.deck_id.into(), input.randomize)
.map(Into::into)
})
}
// statistics // statistics
//----------------------------------------------- //-----------------------------------------------
fn card_stats(&mut self, input: pb::CardId) -> BackendResult<pb::String> { fn card_stats(&self, input: pb::CardId) -> BackendResult<pb::String> {
self.with_col(|col| col.card_stats(input.into())) self.with_col(|col| col.card_stats(input.into()))
.map(Into::into) .map(Into::into)
} }
fn graphs(&mut self, input: pb::GraphsIn) -> BackendResult<pb::GraphsOut> { fn graphs(&self, input: pb::GraphsIn) -> BackendResult<pb::GraphsOut> {
self.with_col(|col| col.graph_data_for_search(&input.search, input.days)) self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
} }
// decks // decks
//----------------------------------------------- //-----------------------------------------------
fn deck_tree(&mut self, input: pb::DeckTreeIn) -> Result<pb::DeckTreeNode> { fn deck_tree(&self, input: pb::DeckTreeIn) -> Result<pb::DeckTreeNode> {
let lim = if input.top_deck_id > 0 { let lim = if input.top_deck_id > 0 {
Some(DeckID(input.top_deck_id)) Some(DeckID(input.top_deck_id))
} else { } else {
@ -570,7 +624,7 @@ impl BackendService for Backend {
}) })
} }
fn deck_tree_legacy(&mut self, _input: pb::Empty) -> BackendResult<pb::Json> { fn deck_tree_legacy(&self, _input: pb::Empty) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let tree = col.legacy_deck_tree()?; let tree = col.legacy_deck_tree()?;
serde_json::to_vec(&tree) serde_json::to_vec(&tree)
@ -579,7 +633,7 @@ impl BackendService for Backend {
}) })
} }
fn get_deck_legacy(&mut self, input: pb::DeckId) -> Result<pb::Json> { fn get_deck_legacy(&self, input: pb::DeckId) -> Result<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let deck: DeckSchema11 = col let deck: DeckSchema11 = col
.storage .storage
@ -592,7 +646,7 @@ impl BackendService for Backend {
}) })
} }
fn get_deck_id_by_name(&mut self, input: pb::String) -> Result<pb::DeckId> { fn get_deck_id_by_name(&self, input: pb::String) -> Result<pb::DeckId> {
self.with_col(|col| { self.with_col(|col| {
col.get_deck_id(&input.val).and_then(|d| { col.get_deck_id(&input.val).and_then(|d| {
d.ok_or(AnkiError::NotFound) d.ok_or(AnkiError::NotFound)
@ -601,7 +655,7 @@ impl BackendService for Backend {
}) })
} }
fn get_all_decks_legacy(&mut self, _input: Empty) -> BackendResult<pb::Json> { fn get_all_decks_legacy(&self, _input: Empty) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let decks = col.storage.get_all_decks_as_schema11()?; let decks = col.storage.get_all_decks_as_schema11()?;
serde_json::to_vec(&decks).map_err(Into::into) serde_json::to_vec(&decks).map_err(Into::into)
@ -609,7 +663,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_deck_names(&mut self, input: pb::GetDeckNamesIn) -> Result<pb::DeckNames> { fn get_deck_names(&self, input: pb::GetDeckNamesIn) -> Result<pb::DeckNames> {
self.with_col(|col| { self.with_col(|col| {
let names = if input.include_filtered { let names = if input.include_filtered {
col.get_all_deck_names(input.skip_empty_default)? col.get_all_deck_names(input.skip_empty_default)?
@ -625,10 +679,7 @@ impl BackendService for Backend {
}) })
} }
fn add_or_update_deck_legacy( fn add_or_update_deck_legacy(&self, input: pb::AddOrUpdateDeckLegacyIn) -> Result<pb::DeckId> {
&mut self,
input: pb::AddOrUpdateDeckLegacyIn,
) -> Result<pb::DeckId> {
self.with_col(|col| { self.with_col(|col| {
let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?; let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
let mut deck: Deck = schema11.into(); let mut deck: Deck = schema11.into();
@ -644,7 +695,7 @@ impl BackendService for Backend {
}) })
} }
fn new_deck_legacy(&mut self, input: pb::Bool) -> BackendResult<pb::Json> { fn new_deck_legacy(&self, input: pb::Bool) -> BackendResult<pb::Json> {
let deck = if input.val { let deck = if input.val {
Deck::new_filtered() Deck::new_filtered()
} else { } else {
@ -656,7 +707,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn remove_deck(&mut self, input: pb::DeckId) -> BackendResult<Empty> { fn remove_deck(&self, input: pb::DeckId) -> BackendResult<Empty> {
self.with_col(|col| col.remove_deck_and_child_decks(input.into())) self.with_col(|col| col.remove_deck_and_child_decks(input.into()))
.map(Into::into) .map(Into::into)
} }
@ -665,7 +716,7 @@ impl BackendService for Backend {
//---------------------------------------------------- //----------------------------------------------------
fn add_or_update_deck_config_legacy( fn add_or_update_deck_config_legacy(
&mut self, &self,
input: AddOrUpdateDeckConfigLegacyIn, input: AddOrUpdateDeckConfigLegacyIn,
) -> BackendResult<pb::DeckConfigId> { ) -> BackendResult<pb::DeckConfigId> {
let conf: DeckConfSchema11 = serde_json::from_slice(&input.config)?; let conf: DeckConfSchema11 = serde_json::from_slice(&input.config)?;
@ -679,7 +730,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn all_deck_config_legacy(&mut self, _input: Empty) -> BackendResult<pb::Json> { fn all_deck_config_legacy(&self, _input: Empty) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let conf: Vec<DeckConfSchema11> = col let conf: Vec<DeckConfSchema11> = col
.storage .storage
@ -692,18 +743,18 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn new_deck_config_legacy(&mut self, _input: Empty) -> BackendResult<pb::Json> { fn new_deck_config_legacy(&self, _input: Empty) -> BackendResult<pb::Json> {
serde_json::to_vec(&DeckConfSchema11::default()) serde_json::to_vec(&DeckConfSchema11::default())
.map_err(Into::into) .map_err(Into::into)
.map(Into::into) .map(Into::into)
} }
fn remove_deck_config(&mut self, input: pb::DeckConfigId) -> BackendResult<Empty> { fn remove_deck_config(&self, input: pb::DeckConfigId) -> BackendResult<Empty> {
self.with_col(|col| col.transact(None, |col| col.remove_deck_config(input.into()))) self.with_col(|col| col.transact(None, |col| col.remove_deck_config(input.into())))
.map(Into::into) .map(Into::into)
} }
fn get_deck_config_legacy(&mut self, input: pb::DeckConfigId) -> BackendResult<pb::Json> { fn get_deck_config_legacy(&self, input: pb::DeckConfigId) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let conf = col.get_deck_config(input.into(), true)?.unwrap(); let conf = col.get_deck_config(input.into(), true)?.unwrap();
let conf: DeckConfSchema11 = conf.into(); let conf: DeckConfSchema11 = conf.into();
@ -715,7 +766,7 @@ impl BackendService for Backend {
// cards // cards
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn get_card(&mut self, input: pb::CardId) -> BackendResult<pb::Card> { fn get_card(&self, input: pb::CardId) -> BackendResult<pb::Card> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.get_card(input.into()) .get_card(input.into())
@ -724,7 +775,7 @@ impl BackendService for Backend {
}) })
} }
fn update_card(&mut self, input: pb::Card) -> BackendResult<Empty> { fn update_card(&self, input: pb::Card) -> BackendResult<Empty> {
let mut card = pbcard_to_native(input)?; let mut card = pbcard_to_native(input)?;
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |ctx| { col.transact(None, |ctx| {
@ -738,13 +789,13 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn add_card(&mut self, input: pb::Card) -> BackendResult<pb::CardId> { fn add_card(&self, input: pb::Card) -> BackendResult<pb::CardId> {
let mut card = pbcard_to_native(input)?; let mut card = pbcard_to_native(input)?;
self.with_col(|col| col.transact(None, |ctx| ctx.add_card(&mut card)))?; self.with_col(|col| col.transact(None, |ctx| ctx.add_card(&mut card)))?;
Ok(pb::CardId { cid: card.id.0 }) Ok(pb::CardId { cid: card.id.0 })
} }
fn remove_cards(&mut self, input: pb::RemoveCardsIn) -> BackendResult<Empty> { fn remove_cards(&self, input: pb::RemoveCardsIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
col.remove_cards_and_orphaned_notes( col.remove_cards_and_orphaned_notes(
@ -759,17 +810,23 @@ impl BackendService for Backend {
}) })
} }
fn set_deck(&self, input: pb::SetDeckIn) -> BackendResult<Empty> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardID).collect();
let deck_id = input.deck_id.into();
self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
}
// notes // notes
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn new_note(&mut self, input: pb::NoteTypeId) -> BackendResult<pb::Note> { fn new_note(&self, input: pb::NoteTypeId) -> BackendResult<pb::Note> {
self.with_col(|col| { self.with_col(|col| {
let nt = col.get_notetype(input.into())?.ok_or(AnkiError::NotFound)?; let nt = col.get_notetype(input.into())?.ok_or(AnkiError::NotFound)?;
Ok(nt.new_note().into()) Ok(nt.new_note().into())
}) })
} }
fn add_note(&mut self, input: pb::AddNoteIn) -> BackendResult<pb::NoteId> { fn add_note(&self, input: pb::AddNoteIn) -> BackendResult<pb::NoteId> {
self.with_col(|col| { self.with_col(|col| {
let mut note: Note = input.note.ok_or(AnkiError::NotFound)?.into(); let mut note: Note = input.note.ok_or(AnkiError::NotFound)?.into();
col.add_note(&mut note, DeckID(input.deck_id)) col.add_note(&mut note, DeckID(input.deck_id))
@ -777,7 +834,7 @@ impl BackendService for Backend {
}) })
} }
fn update_note(&mut self, input: pb::Note) -> BackendResult<Empty> { fn update_note(&self, input: pb::Note) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
let mut note: Note = input.into(); let mut note: Note = input.into();
col.update_note(&mut note) col.update_note(&mut note)
@ -785,7 +842,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_note(&mut self, input: pb::NoteId) -> BackendResult<pb::Note> { fn get_note(&self, input: pb::NoteId) -> BackendResult<pb::Note> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.get_note(input.into())? .get_note(input.into())?
@ -794,7 +851,7 @@ impl BackendService for Backend {
}) })
} }
fn remove_notes(&mut self, input: pb::RemoveNotesIn) -> BackendResult<Empty> { fn remove_notes(&self, input: pb::RemoveNotesIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
if !input.note_ids.is_empty() { if !input.note_ids.is_empty() {
col.remove_notes( col.remove_notes(
@ -819,7 +876,7 @@ impl BackendService for Backend {
}) })
} }
fn add_note_tags(&mut self, input: pb::AddNoteTagsIn) -> BackendResult<pb::UInt32> { fn add_note_tags(&self, input: pb::AddNoteTagsIn) -> BackendResult<pb::UInt32> {
self.with_col(|col| { self.with_col(|col| {
col.add_tags_for_notes(&to_nids(input.nids), &input.tags) col.add_tags_for_notes(&to_nids(input.nids), &input.tags)
.map(|n| n as u32) .map(|n| n as u32)
@ -827,7 +884,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn update_note_tags(&mut self, input: pb::UpdateNoteTagsIn) -> BackendResult<pb::UInt32> { fn update_note_tags(&self, input: pb::UpdateNoteTagsIn) -> BackendResult<pb::UInt32> {
self.with_col(|col| { self.with_col(|col| {
col.replace_tags_for_notes( col.replace_tags_for_notes(
&to_nids(input.nids), &to_nids(input.nids),
@ -839,10 +896,7 @@ impl BackendService for Backend {
}) })
} }
fn cloze_numbers_in_note( fn cloze_numbers_in_note(&self, note: pb::Note) -> BackendResult<pb::ClozeNumbersInNoteOut> {
&mut self,
note: pb::Note,
) -> BackendResult<pb::ClozeNumbersInNoteOut> {
let mut set = HashSet::with_capacity(4); let mut set = HashSet::with_capacity(4);
for field in &note.fields { for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set); add_cloze_numbers_in_string(field, &mut set);
@ -853,7 +907,7 @@ impl BackendService for Backend {
} }
fn field_names_for_notes( fn field_names_for_notes(
&mut self, &self,
input: pb::FieldNamesForNotesIn, input: pb::FieldNamesForNotesIn,
) -> BackendResult<pb::FieldNamesForNotesOut> { ) -> BackendResult<pb::FieldNamesForNotesOut> {
self.with_col(|col| { self.with_col(|col| {
@ -864,7 +918,7 @@ impl BackendService for Backend {
}) })
} }
fn after_note_updates(&mut self, input: pb::AfterNoteUpdatesIn) -> BackendResult<Empty> { fn after_note_updates(&self, input: pb::AfterNoteUpdatesIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
col.after_note_updates( col.after_note_updates(
@ -878,7 +932,7 @@ impl BackendService for Backend {
} }
fn note_is_duplicate_or_empty( fn note_is_duplicate_or_empty(
&mut self, &self,
input: pb::Note, input: pb::Note,
) -> BackendResult<pb::NoteIsDuplicateOrEmptyOut> { ) -> BackendResult<pb::NoteIsDuplicateOrEmptyOut> {
let note: Note = input.into(); let note: Note = input.into();
@ -888,7 +942,7 @@ impl BackendService for Backend {
}) })
} }
fn cards_of_note(&mut self, input: pb::NoteId) -> BackendResult<pb::CardIDs> { fn cards_of_note(&self, input: pb::NoteId) -> BackendResult<pb::CardIDs> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.all_card_ids_of_note(NoteID(input.nid)) .all_card_ids_of_note(NoteID(input.nid))
@ -901,10 +955,7 @@ impl BackendService for Backend {
// notetypes // notetypes
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn get_stock_notetype_legacy( fn get_stock_notetype_legacy(&self, input: pb::GetStockNotetypeIn) -> BackendResult<pb::Json> {
&mut self,
input: pb::GetStockNotetypeIn,
) -> BackendResult<pb::Json> {
// fixme: use individual functions instead of full vec // fixme: use individual functions instead of full vec
let mut all = all_stock_notetypes(&self.i18n); let mut all = all_stock_notetypes(&self.i18n);
let idx = (input.kind as usize).min(all.len() - 1); let idx = (input.kind as usize).min(all.len() - 1);
@ -915,7 +966,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_notetype_names(&mut self, _input: Empty) -> BackendResult<pb::NoteTypeNames> { fn get_notetype_names(&self, _input: Empty) -> BackendResult<pb::NoteTypeNames> {
self.with_col(|col| { self.with_col(|col| {
let entries: Vec<_> = col let entries: Vec<_> = col
.storage .storage
@ -927,10 +978,7 @@ impl BackendService for Backend {
}) })
} }
fn get_notetype_names_and_counts( fn get_notetype_names_and_counts(&self, _input: Empty) -> BackendResult<pb::NoteTypeUseCounts> {
&mut self,
_input: Empty,
) -> BackendResult<pb::NoteTypeUseCounts> {
self.with_col(|col| { self.with_col(|col| {
let entries: Vec<_> = col let entries: Vec<_> = col
.storage .storage
@ -946,7 +994,7 @@ impl BackendService for Backend {
}) })
} }
fn get_notetype_legacy(&mut self, input: pb::NoteTypeId) -> BackendResult<pb::Json> { fn get_notetype_legacy(&self, input: pb::NoteTypeId) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let schema11: NoteTypeSchema11 = col let schema11: NoteTypeSchema11 = col
.storage .storage
@ -957,7 +1005,7 @@ impl BackendService for Backend {
}) })
} }
fn get_notetype_id_by_name(&mut self, input: pb::String) -> BackendResult<pb::NoteTypeId> { fn get_notetype_id_by_name(&self, input: pb::String) -> BackendResult<pb::NoteTypeId> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.get_notetype_id(&input.val) .get_notetype_id(&input.val)
@ -967,7 +1015,7 @@ impl BackendService for Backend {
} }
fn add_or_update_notetype( fn add_or_update_notetype(
&mut self, &self,
input: pb::AddOrUpdateNotetypeIn, input: pb::AddOrUpdateNotetypeIn,
) -> BackendResult<pb::NoteTypeId> { ) -> BackendResult<pb::NoteTypeId> {
self.with_col(|col| { self.with_col(|col| {
@ -982,7 +1030,7 @@ impl BackendService for Backend {
}) })
} }
fn remove_notetype(&mut self, input: pb::NoteTypeId) -> BackendResult<Empty> { fn remove_notetype(&self, input: pb::NoteTypeId) -> BackendResult<Empty> {
self.with_col(|col| col.remove_notetype(input.into())) self.with_col(|col| col.remove_notetype(input.into()))
.map(Into::into) .map(Into::into)
} }
@ -990,7 +1038,7 @@ impl BackendService for Backend {
// media // media
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn add_media_file(&mut self, input: pb::AddMediaFileIn) -> BackendResult<pb::String> { fn add_media_file(&self, input: pb::AddMediaFileIn) -> BackendResult<pb::String> {
self.with_col(|col| { self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx(); let mut ctx = mgr.dbctx();
@ -1001,7 +1049,7 @@ impl BackendService for Backend {
}) })
} }
fn empty_trash(&mut self, _input: Empty) -> BackendResult<Empty> { fn empty_trash(&self, _input: Empty) -> BackendResult<Empty> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -1017,7 +1065,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn restore_trash(&mut self, _input: Empty) -> BackendResult<Empty> { fn restore_trash(&self, _input: Empty) -> BackendResult<Empty> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -1033,7 +1081,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn trash_media_files(&mut self, input: pb::TrashMediaFilesIn) -> BackendResult<Empty> { fn trash_media_files(&self, input: pb::TrashMediaFilesIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx(); let mut ctx = mgr.dbctx();
@ -1042,7 +1090,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn check_media(&mut self, _input: pb::Empty) -> Result<pb::CheckMediaOut> { fn check_media(&self, _input: pb::Empty) -> Result<pb::CheckMediaOut> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -1067,7 +1115,7 @@ impl BackendService for Backend {
// collection // collection
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn check_database(&mut self, _input: pb::Empty) -> BackendResult<pb::CheckDatabaseOut> { fn check_database(&self, _input: pb::Empty) -> BackendResult<pb::CheckDatabaseOut> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = move |progress, throttle| { let progress_fn = move |progress, throttle| {
handler.update(Progress::DatabaseCheck(progress), throttle); handler.update(Progress::DatabaseCheck(progress), throttle);
@ -1080,7 +1128,7 @@ impl BackendService for Backend {
}) })
} }
fn open_collection(&mut self, input: pb::OpenCollectionIn) -> BackendResult<Empty> { fn open_collection(&self, input: pb::OpenCollectionIn) -> BackendResult<Empty> {
let mut col = self.col.lock().unwrap(); let mut col = self.col.lock().unwrap();
if col.is_some() { if col.is_some() {
return Err(AnkiError::CollectionAlreadyOpen); return Err(AnkiError::CollectionAlreadyOpen);
@ -1109,7 +1157,7 @@ impl BackendService for Backend {
Ok(().into()) Ok(().into())
} }
fn close_collection(&mut self, input: pb::CloseCollectionIn) -> BackendResult<Empty> { fn close_collection(&self, input: pb::CloseCollectionIn) -> BackendResult<Empty> {
self.abort_media_sync_and_wait(); self.abort_media_sync_and_wait();
let mut col = self.col.lock().unwrap(); let mut col = self.col.lock().unwrap();
@ -1131,41 +1179,41 @@ impl BackendService for Backend {
// sync // sync
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn sync_login(&mut self, input: pb::SyncLoginIn) -> BackendResult<pb::SyncAuth> { fn sync_login(&self, input: pb::SyncLoginIn) -> BackendResult<pb::SyncAuth> {
self.sync_login_inner(input) self.sync_login_inner(input)
} }
fn sync_status(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> { fn sync_status(&self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> {
self.sync_status_inner(input) self.sync_status_inner(input)
} }
fn sync_collection(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncCollectionOut> { fn sync_collection(&self, input: pb::SyncAuth) -> BackendResult<pb::SyncCollectionOut> {
self.sync_collection_inner(input) self.sync_collection_inner(input)
} }
fn full_upload(&mut self, input: pb::SyncAuth) -> BackendResult<Empty> { fn full_upload(&self, input: pb::SyncAuth) -> BackendResult<Empty> {
self.full_sync_inner(input, true)?; self.full_sync_inner(input, true)?;
Ok(().into()) Ok(().into())
} }
fn full_download(&mut self, input: pb::SyncAuth) -> BackendResult<Empty> { fn full_download(&self, input: pb::SyncAuth) -> BackendResult<Empty> {
self.full_sync_inner(input, false)?; self.full_sync_inner(input, false)?;
Ok(().into()) Ok(().into())
} }
fn sync_media(&mut self, input: pb::SyncAuth) -> BackendResult<Empty> { fn sync_media(&self, input: pb::SyncAuth) -> BackendResult<Empty> {
self.sync_media_inner(input).map(Into::into) self.sync_media_inner(input).map(Into::into)
} }
fn abort_sync(&mut self, _input: Empty) -> BackendResult<Empty> { fn abort_sync(&self, _input: Empty) -> BackendResult<Empty> {
if let Some(handle) = self.sync_abort.take() { if let Some(handle) = self.sync_abort.lock().unwrap().take() {
handle.abort(); handle.abort();
} }
Ok(().into()) Ok(().into())
} }
/// Abort the media sync. Does not wait for completion. /// Abort the media sync. Does not wait for completion.
fn abort_media_sync(&mut self, _input: Empty) -> BackendResult<Empty> { fn abort_media_sync(&self, _input: Empty) -> BackendResult<Empty> {
let guard = self.state.lock().unwrap(); let guard = self.state.lock().unwrap();
if let Some(handle) = &guard.media_sync_abort { if let Some(handle) = &guard.media_sync_abort {
handle.abort(); handle.abort();
@ -1173,14 +1221,14 @@ impl BackendService for Backend {
Ok(().into()) Ok(().into())
} }
fn before_upload(&mut self, _input: Empty) -> BackendResult<Empty> { fn before_upload(&self, _input: Empty) -> BackendResult<Empty> {
self.with_col(|col| col.before_upload().map(Into::into)) self.with_col(|col| col.before_upload().map(Into::into))
} }
// i18n/messages // i18n/messages
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn translate_string(&mut self, input: pb::TranslateStringIn) -> BackendResult<pb::String> { fn translate_string(&self, input: pb::TranslateStringIn) -> BackendResult<pb::String> {
let key = match pb::FluentString::from_i32(input.key) { let key = match pb::FluentString::from_i32(input.key) {
Some(key) => key, Some(key) => key,
None => return Ok("invalid key".to_string().into()), None => return Ok("invalid key".to_string().into()),
@ -1195,7 +1243,7 @@ impl BackendService for Backend {
Ok(self.i18n.trn(key, map).into()) Ok(self.i18n.trn(key, map).into())
} }
fn format_timespan(&mut self, input: pb::FormatTimespanIn) -> BackendResult<pb::String> { fn format_timespan(&self, input: pb::FormatTimespanIn) -> BackendResult<pb::String> {
let context = match pb::format_timespan_in::Context::from_i32(input.context) { let context = match pb::format_timespan_in::Context::from_i32(input.context) {
Some(context) => context, Some(context) => context,
None => return Ok("".to_string().into()), None => return Ok("".to_string().into()),
@ -1212,7 +1260,7 @@ impl BackendService for Backend {
.into()) .into())
} }
fn i18n_resources(&mut self, _input: Empty) -> BackendResult<pb::Json> { fn i18n_resources(&self, _input: Empty) -> BackendResult<pb::Json> {
serde_json::to_vec(&self.i18n.resources_for_js()) serde_json::to_vec(&self.i18n.resources_for_js())
.map(Into::into) .map(Into::into)
.map_err(Into::into) .map_err(Into::into)
@ -1221,7 +1269,7 @@ impl BackendService for Backend {
// tags // tags
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn all_tags(&mut self, _input: Empty) -> BackendResult<pb::AllTagsOut> { fn all_tags(&self, _input: Empty) -> BackendResult<pb::AllTagsOut> {
let tags = self.with_col(|col| col.storage.all_tags())?; let tags = self.with_col(|col| col.storage.all_tags())?;
let tags: Vec<_> = tags let tags: Vec<_> = tags
.into_iter() .into_iter()
@ -1230,7 +1278,7 @@ impl BackendService for Backend {
Ok(pb::AllTagsOut { tags }) Ok(pb::AllTagsOut { tags })
} }
fn register_tags(&mut self, input: pb::RegisterTagsIn) -> BackendResult<pb::Bool> { fn register_tags(&self, input: pb::RegisterTagsIn) -> BackendResult<pb::Bool> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
let usn = if input.preserve_usn { let usn = if input.preserve_usn {
@ -1247,7 +1295,7 @@ impl BackendService for Backend {
// config/preferences // config/preferences
//------------------------------------------------------------------- //-------------------------------------------------------------------
fn get_config_json(&mut self, input: pb::String) -> BackendResult<pb::Json> { fn get_config_json(&self, input: pb::String) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let val: Option<JsonValue> = col.get_config_optional(input.val.as_str()); let val: Option<JsonValue> = col.get_config_optional(input.val.as_str());
val.ok_or(AnkiError::NotFound) val.ok_or(AnkiError::NotFound)
@ -1256,7 +1304,7 @@ impl BackendService for Backend {
}) })
} }
fn set_config_json(&mut self, input: pb::SetConfigJsonIn) -> BackendResult<Empty> { fn set_config_json(&self, input: pb::SetConfigJsonIn) -> BackendResult<Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
// ensure it's a well-formed object // ensure it's a well-formed object
@ -1267,12 +1315,12 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn remove_config(&mut self, input: pb::String) -> BackendResult<Empty> { fn remove_config(&self, input: pb::String) -> BackendResult<Empty> {
self.with_col(|col| col.transact(None, |col| col.remove_config(input.val.as_str()))) self.with_col(|col| col.transact(None, |col| col.remove_config(input.val.as_str())))
.map(Into::into) .map(Into::into)
} }
fn set_all_config(&mut self, input: pb::Json) -> BackendResult<Empty> { fn set_all_config(&self, input: pb::Json) -> BackendResult<Empty> {
let val: HashMap<String, JsonValue> = serde_json::from_slice(&input.json)?; let val: HashMap<String, JsonValue> = serde_json::from_slice(&input.json)?;
self.with_col(|col| { self.with_col(|col| {
col.transact(None, |col| { col.transact(None, |col| {
@ -1283,7 +1331,7 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_all_config(&mut self, _input: Empty) -> BackendResult<pb::Json> { fn get_all_config(&self, _input: Empty) -> BackendResult<pb::Json> {
self.with_col(|col| { self.with_col(|col| {
let conf = col.storage.get_all_config()?; let conf = col.storage.get_all_config()?;
serde_json::to_vec(&conf).map_err(Into::into) serde_json::to_vec(&conf).map_err(Into::into)
@ -1291,11 +1339,11 @@ impl BackendService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_preferences(&mut self, _input: Empty) -> BackendResult<pb::Preferences> { fn get_preferences(&self, _input: Empty) -> BackendResult<pb::Preferences> {
self.with_col(|col| col.get_preferences()) self.with_col(|col| col.get_preferences())
} }
fn set_preferences(&mut self, input: pb::Preferences) -> BackendResult<Empty> { fn set_preferences(&self, input: pb::Preferences) -> BackendResult<Empty> {
self.with_col(|col| col.transact(None, |col| col.set_preferences(input))) self.with_col(|col| col.transact(None, |col| col.set_preferences(input)))
.map(Into::into) .map(Into::into)
} }
@ -1307,12 +1355,12 @@ impl Backend {
col: Arc::new(Mutex::new(None)), col: Arc::new(Mutex::new(None)),
i18n, i18n,
server, server,
sync_abort: None, sync_abort: Arc::new(Mutex::new(None)),
progress_state: Arc::new(Mutex::new(ProgressState { progress_state: Arc::new(Mutex::new(ProgressState {
want_abort: false, want_abort: false,
last_progress: None, last_progress: None,
})), })),
runtime: None, runtime: OnceCell::new(),
state: Arc::new(Mutex::new(BackendState::default())), state: Arc::new(Mutex::new(BackendState::default())),
} }
} }
@ -1321,11 +1369,7 @@ impl Backend {
&self.i18n &self.i18n
} }
pub fn run_command_bytes( pub fn run_command_bytes(&self, method: u32, input: &[u8]) -> result::Result<Vec<u8>, Vec<u8>> {
&mut self,
method: u32,
input: &[u8],
) -> result::Result<Vec<u8>, Vec<u8>> {
self.run_command_bytes2_inner(method, input).map_err(|err| { self.run_command_bytes2_inner(method, input).map_err(|err| {
let backend_err = anki_error_to_proto_error(err, &self.i18n); let backend_err = anki_error_to_proto_error(err, &self.i18n);
let mut bytes = Vec::new(); let mut bytes = Vec::new();
@ -1357,26 +1401,54 @@ impl Backend {
guard.last_progress = None; guard.last_progress = None;
} }
ThrottlingProgressHandler { ThrottlingProgressHandler {
state: self.progress_state.clone(), state: Arc::clone(&self.progress_state),
last_update: coarsetime::Instant::now(), last_update: coarsetime::Instant::now(),
} }
} }
fn runtime_handle(&mut self) -> runtime::Handle { fn runtime_handle(&self) -> runtime::Handle {
if self.runtime.is_none() { self.runtime
self.runtime = Some( .get_or_init(|| {
runtime::Builder::new() runtime::Builder::new()
.threaded_scheduler() .threaded_scheduler()
.core_threads(1) .core_threads(1)
.enable_all() .enable_all()
.build() .build()
.unwrap(), .unwrap()
) })
} .handle()
self.runtime.as_ref().unwrap().handle().clone() .clone()
} }
fn sync_media_inner(&mut self, input: pb::SyncAuth) -> Result<()> { fn sync_abort_handle(
&self,
) -> BackendResult<(
scopeguard::ScopeGuard<AbortHandleSlot, impl FnOnce(AbortHandleSlot)>,
AbortRegistration,
)> {
let (abort_handle, abort_reg) = AbortHandle::new_pair();
// Register the new abort_handle.
let old_handle = self.sync_abort.lock().unwrap().replace(abort_handle);
if old_handle.is_some() {
// NOTE: In the future we would ideally be able to handle multiple
// abort handles by just iterating over them all in
// abort_sync). But for now, just log a warning if there was
// already one present -- but don't abort it either.
let log = self.with_col(|col| Ok(col.log.clone()))?;
warn!(
log,
"new sync_abort handle registered, but old one was still present (old sync job might not be cancelled on abort)"
);
}
// Clear the abort handle after the caller is done and drops the guard.
let guard = scopeguard::guard(Arc::clone(&self.sync_abort), |sync_abort| {
sync_abort.lock().unwrap().take();
});
Ok((guard, abort_reg))
}
fn sync_media_inner(&self, input: pb::SyncAuth) -> Result<()> {
// mark media sync as active // mark media sync as active
let (abort_handle, abort_reg) = AbortHandle::new_pair(); let (abort_handle, abort_reg) = AbortHandle::new_pair();
{ {
@ -1421,7 +1493,7 @@ impl Backend {
} }
/// Abort the media sync. Won't return until aborted. /// Abort the media sync. Won't return until aborted.
fn abort_media_sync_and_wait(&mut self) { fn abort_media_sync_and_wait(&self) {
let guard = self.state.lock().unwrap(); let guard = self.state.lock().unwrap();
if let Some(handle) = &guard.media_sync_abort { if let Some(handle) = &guard.media_sync_abort {
handle.abort(); handle.abort();
@ -1436,9 +1508,8 @@ impl Backend {
} }
} }
fn sync_login_inner(&mut self, input: pb::SyncLoginIn) -> BackendResult<pb::SyncAuth> { fn sync_login_inner(&self, input: pb::SyncLoginIn) -> BackendResult<pb::SyncAuth> {
let (abort_handle, abort_reg) = AbortHandle::new_pair(); let (_guard, abort_reg) = self.sync_abort_handle()?;
self.sync_abort = Some(abort_handle);
let rt = self.runtime_handle(); let rt = self.runtime_handle();
let sync_fut = sync_login(&input.username, &input.password); let sync_fut = sync_login(&input.username, &input.password);
@ -1447,14 +1518,13 @@ impl Backend {
Ok(sync_result) => sync_result, Ok(sync_result) => sync_result,
Err(_) => Err(AnkiError::Interrupted), Err(_) => Err(AnkiError::Interrupted),
}; };
self.sync_abort = None;
ret.map(|a| pb::SyncAuth { ret.map(|a| pb::SyncAuth {
hkey: a.hkey, hkey: a.hkey,
host_number: a.host_number, host_number: a.host_number,
}) })
} }
fn sync_status_inner(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> { fn sync_status_inner(&self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> {
// any local changes mean we can skip the network round-trip // any local changes mean we can skip the network round-trip
let req = self.with_col(|col| col.get_local_sync_status())?; let req = self.with_col(|col| col.get_local_sync_status())?;
if req != pb::sync_status_out::Required::NoChanges { if req != pb::sync_status_out::Required::NoChanges {
@ -1483,12 +1553,8 @@ impl Backend {
Ok(response.into()) Ok(response.into())
} }
fn sync_collection_inner( fn sync_collection_inner(&self, input: pb::SyncAuth) -> BackendResult<pb::SyncCollectionOut> {
&mut self, let (_guard, abort_reg) = self.sync_abort_handle()?;
input: pb::SyncAuth,
) -> BackendResult<pb::SyncCollectionOut> {
let (abort_handle, abort_reg) = AbortHandle::new_pair();
self.sync_abort = Some(abort_handle);
let rt = self.runtime_handle(); let rt = self.runtime_handle();
let input_copy = input.clone(); let input_copy = input.clone();
@ -1516,7 +1582,6 @@ impl Backend {
} }
} }
}); });
self.sync_abort = None;
let output: SyncOutput = ret?; let output: SyncOutput = ret?;
self.state self.state
@ -1527,7 +1592,7 @@ impl Backend {
Ok(output.into()) Ok(output.into())
} }
fn full_sync_inner(&mut self, input: pb::SyncAuth, upload: bool) -> Result<()> { fn full_sync_inner(&self, input: pb::SyncAuth, upload: bool) -> Result<()> {
self.abort_media_sync_and_wait(); self.abort_media_sync_and_wait();
let rt = self.runtime_handle(); let rt = self.runtime_handle();
@ -1539,8 +1604,7 @@ impl Backend {
let col_inner = col.take().unwrap(); let col_inner = col.take().unwrap();
let (abort_handle, abort_reg) = AbortHandle::new_pair(); let (_guard, abort_reg) = self.sync_abort_handle()?;
self.sync_abort = Some(abort_handle);
let col_path = col_inner.col_path.clone(); let col_path = col_inner.col_path.clone();
let media_folder_path = col_inner.media_folder.clone(); let media_folder_path = col_inner.media_folder.clone();
@ -1561,7 +1625,6 @@ impl Backend {
let abortable_sync = Abortable::new(sync_fut, abort_reg); let abortable_sync = Abortable::new(sync_fut, abort_reg);
rt.block_on(abortable_sync) rt.block_on(abortable_sync)
}; };
self.sync_abort = None;
// ensure re-opened regardless of outcome // ensure re-opened regardless of outcome
col.replace(open_collection( col.replace(open_collection(

View file

@ -6,8 +6,8 @@ use crate::define_newtype;
use crate::err::{AnkiError, Result}; use crate::err::{AnkiError, Result};
use crate::notes::NoteID; use crate::notes::NoteID;
use crate::{ use crate::{
collection::Collection, config::SchedulerVersion, deckconf::INITIAL_EASE_FACTOR, collection::Collection, config::SchedulerVersion, timestamp::TimestampSecs, types::Usn,
timestamp::TimestampSecs, types::Usn, undo::Undoable, undo::Undoable,
}; };
use num_enum::TryFromPrimitive; use num_enum::TryFromPrimitive;
use serde_repr::{Deserialize_repr, Serialize_repr}; use serde_repr::{Deserialize_repr, Serialize_repr};
@ -102,82 +102,10 @@ impl Card {
self.usn = usn; self.usn = usn;
} }
pub(crate) fn return_home(&mut self, sched: SchedulerVersion) { /// Caller must ensure provided deck exists and is not filtered.
if self.original_deck_id.0 == 0 { fn set_deck(&mut self, deck: DeckID, sched: SchedulerVersion) {
// not in a filtered deck self.remove_from_filtered_deck_restoring_queue(sched);
return; self.deck_id = deck;
}
self.deck_id = self.original_deck_id;
self.original_deck_id.0 = 0;
if self.original_due > 0 {
self.due = self.original_due;
}
self.original_due = 0;
self.queue = match sched {
SchedulerVersion::V1 => {
match self.ctype {
CardType::New => CardQueue::New,
CardType::Learn => CardQueue::New,
CardType::Review => CardQueue::Review,
// not applicable in v1, should not happen
CardType::Relearn => {
println!("did not expect relearn type in v1 for card {}", self.id);
CardQueue::New
}
}
}
SchedulerVersion::V2 => {
if (self.queue as i8) >= 0 {
match self.ctype {
CardType::Learn | CardType::Relearn => {
if self.due > 1_000_000_000 {
// unix timestamp
CardQueue::Learn
} else {
// day number
CardQueue::DayLearn
}
}
CardType::New => CardQueue::New,
CardType::Review => CardQueue::Review,
}
} else {
self.queue
}
}
};
if sched == SchedulerVersion::V1 && self.ctype == CardType::Learn {
self.ctype = CardType::New;
}
}
/// Remove the card from the (re)learning queue.
/// This will reset cards in learning.
/// Only used in the V1 scheduler.
/// Unlike the legacy Python code, this sets the due# to 0 instead of
/// one past the previous max due number.
pub(crate) fn remove_from_learning(&mut self) {
if !matches!(self.queue, CardQueue::Learn | CardQueue::DayLearn) {
return;
}
if self.ctype == CardType::Review {
// reviews are removed from relearning
self.due = self.original_due;
self.original_due = 0;
self.queue = CardQueue::Review;
} else {
// other cards are reset to new
self.ctype = CardType::New;
self.queue = CardQueue::New;
self.interval = 0;
self.due = 0;
self.original_due = 0;
self.ease_factor = INITIAL_EASE_FACTOR;
}
} }
} }
#[derive(Debug)] #[derive(Debug)]
@ -268,6 +196,27 @@ impl Collection {
Ok(()) Ok(())
} }
pub fn set_deck(&mut self, cards: &[CardID], deck_id: DeckID) -> Result<()> {
let deck = self.get_deck(deck_id)?.ok_or(AnkiError::NotFound)?;
if deck.is_filtered() {
return Err(AnkiError::DeckIsFiltered);
}
self.storage.set_search_table_to_card_ids(cards)?;
let sched = self.sched_ver();
let usn = self.usn()?;
self.transact(None, |col| {
for mut card in col.storage.all_searched_cards()? {
if card.deck_id == deck_id {
continue;
}
let original = card.clone();
card.set_deck(deck_id, sched);
col.update_card(&mut card, &original, usn)?;
}
Ok(())
})
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -179,6 +179,10 @@ impl Collection {
self.set_config(ConfigKey::CurrentNoteTypeID, &id) self.set_config(ConfigKey::CurrentNoteTypeID, &id)
} }
pub(crate) fn get_next_card_position(&self) -> u32 {
self.get_config_default(ConfigKey::NextNewCardPosition)
}
pub(crate) fn get_and_update_next_card_position(&self) -> Result<u32> { pub(crate) fn get_and_update_next_card_position(&self) -> Result<u32> {
let pos: u32 = self let pos: u32 = self
.get_config_optional(ConfigKey::NextNewCardPosition) .get_config_optional(ConfigKey::NextNewCardPosition)

View file

@ -7,7 +7,6 @@ pub use crate::backend_proto::{
DeckCommon, DeckKind as DeckKindProto, FilteredDeck, FilteredSearchTerm, NormalDeck, DeckCommon, DeckKind as DeckKindProto, FilteredDeck, FilteredSearchTerm, NormalDeck,
}; };
use crate::{ use crate::{
card::CardID,
collection::Collection, collection::Collection,
deckconf::DeckConfID, deckconf::DeckConfID,
define_newtype, define_newtype,
@ -51,25 +50,6 @@ impl Deck {
} }
} }
pub fn new_filtered() -> Deck {
let mut filt = FilteredDeck::default();
filt.search_terms.push(FilteredSearchTerm {
search: "".into(),
limit: 100,
order: 0,
});
filt.preview_delay = 10;
filt.reschedule = true;
Deck {
id: DeckID(0),
name: "".into(),
mtime_secs: TimestampSecs(0),
usn: Usn(0),
common: DeckCommon::default(),
kind: DeckKind::Filtered(filt),
}
}
fn reset_stats_if_day_changed(&mut self, today: u32) { fn reset_stats_if_day_changed(&mut self, today: u32) {
let c = &mut self.common; let c = &mut self.common;
if c.last_day_studied != today { if c.last_day_studied != today {
@ -80,12 +60,6 @@ impl Deck {
c.last_day_studied = today; c.last_day_studied = today;
} }
} }
}
impl Deck {
pub(crate) fn is_filtered(&self) -> bool {
matches!(self.kind, DeckKind::Filtered(_))
}
/// Returns deck config ID if deck is a normal deck. /// Returns deck config ID if deck is a normal deck.
pub(crate) fn config_id(&self) -> Option<DeckConfID> { pub(crate) fn config_id(&self) -> Option<DeckConfID> {
@ -434,23 +408,6 @@ impl Collection {
self.remove_cards_and_orphaned_notes(&cids) self.remove_cards_and_orphaned_notes(&cids)
} }
fn return_all_cards_in_filtered_deck(&mut self, did: DeckID) -> Result<()> {
let cids = self.storage.all_cards_in_single_deck(did)?;
self.return_cards_to_home_deck(&cids)
}
fn return_cards_to_home_deck(&mut self, cids: &[CardID]) -> Result<()> {
let sched = self.sched_ver();
for cid in cids {
if let Some(mut card) = self.storage.get_card(*cid)? {
// fixme: undo
card.return_home(sched);
self.storage.update_card(&card)?;
}
}
Ok(())
}
pub fn get_all_deck_names(&self, skip_empty_default: bool) -> Result<Vec<(DeckID, String)>> { pub fn get_all_deck_names(&self, skip_empty_default: bool) -> Result<Vec<(DeckID, String)>> {
if skip_empty_default && self.default_deck_is_empty()? { if skip_empty_default && self.default_deck_is_empty()? {
Ok(self Ok(self

268
rslib/src/filtered.rs Normal file
View file

@ -0,0 +1,268 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub use crate::backend_proto::{
deck_kind::Kind as DeckKind, filtered_search_term::FilteredSearchOrder, Deck as DeckProto,
DeckCommon, DeckKind as DeckKindProto, FilteredDeck, FilteredSearchTerm, NormalDeck,
};
use crate::decks::{Deck, DeckID};
use crate::{
card::{Card, CardID, CardQueue, CardType},
collection::Collection,
config::SchedulerVersion,
err::Result,
prelude::AnkiError,
search::SortMode,
timestamp::TimestampSecs,
types::Usn,
};
impl Card {
    /// Move this card into the filtered deck described by `ctx`, remembering
    /// its original deck and due number so it can later be returned home.
    /// `position` becomes the card's temporary due number (where rescheduled;
    /// see the per-scheduler branches below).
    pub(crate) fn move_into_filtered_deck(&mut self, ctx: &DeckFilterContext, position: i32) {
        // filtered and v1 learning cards are excluded, so odue should be guaranteed to be zero
        if self.original_due != 0 {
            println!("bug: odue was set");
            return;
        }

        self.original_deck_id = self.deck_id;
        self.deck_id = ctx.target_deck;

        self.original_due = self.due;

        if ctx.scheduler == SchedulerVersion::V1 {
            if self.ctype == CardType::Review && self.due <= ctx.today as i32 {
                // review cards that are due are left in the review queue
            } else {
                // new + non-due go into new queue
                self.queue = CardQueue::New;
            }
            if self.due != 0 {
                self.due = position;
            }
        } else {
            // if rescheduling is disabled, all cards go in the review queue
            if !ctx.config.reschedule {
                self.queue = CardQueue::Review;
            }
            // fixme: can we unify this with v1 scheduler in the future?
            // https://anki.tenderapp.com/discussions/ankidesktop/35978-rebuilding-filtered-deck-on-experimental-v2-empties-deck-and-reschedules-to-the-year-1745
            if self.due > 0 {
                self.due = position;
            }
        }
    }

    /// Restores to the original deck and clears original_due.
    /// This does not update the queue or type, so should only be used as
    /// part of an operation that adjusts those separately.
    pub(crate) fn remove_from_filtered_deck_before_reschedule(&mut self) {
        if self.original_deck_id.0 != 0 {
            self.deck_id = self.original_deck_id;
            self.original_deck_id.0 = 0;
            self.original_due = 0;
        }
    }

    /// Return the card to its home deck, restoring the due number and the
    /// queue appropriate for its type under the given scheduler version.
    /// No-op if the card is not currently in a filtered deck.
    pub(crate) fn remove_from_filtered_deck_restoring_queue(&mut self, sched: SchedulerVersion) {
        if self.original_deck_id.0 == 0 {
            // not in a filtered deck
            return;
        }

        self.deck_id = self.original_deck_id;
        self.original_deck_id.0 = 0;

        match sched {
            SchedulerVersion::V1 => {
                self.due = self.original_due;
                self.queue = match self.ctype {
                    CardType::New => CardQueue::New,
                    CardType::Learn => CardQueue::New,
                    CardType::Review => CardQueue::Review,
                    // not applicable in v1, should not happen
                    CardType::Relearn => {
                        println!("did not expect relearn type in v1 for card {}", self.id);
                        CardQueue::New
                    }
                };
                if self.ctype == CardType::Learn {
                    // learning cards are reset to new on v1
                    self.ctype = CardType::New;
                }
            }
            SchedulerVersion::V2 => {
                // original_due is cleared if card answered in filtered deck
                if self.original_due > 0 {
                    self.due = self.original_due;
                }
                // negative queues (suspended/buried) are left untouched
                if (self.queue as i8) >= 0 {
                    self.queue = match self.ctype {
                        CardType::Learn | CardType::Relearn => {
                            if self.due > 1_000_000_000 {
                                // unix timestamp
                                CardQueue::Learn
                            } else {
                                // day number
                                CardQueue::DayLearn
                            }
                        }
                        CardType::New => CardQueue::New,
                        CardType::Review => CardQueue::Review,
                    }
                }
            }
        }

        self.original_due = 0;
    }
}
impl Deck {
pub fn new_filtered() -> Deck {
let mut filt = FilteredDeck::default();
filt.search_terms.push(FilteredSearchTerm {
search: "".into(),
limit: 100,
order: 0,
});
filt.preview_delay = 10;
filt.reschedule = true;
Deck {
id: DeckID(0),
name: "".into(),
mtime_secs: TimestampSecs(0),
usn: Usn(0),
common: DeckCommon::default(),
kind: DeckKind::Filtered(filt),
}
}
pub(crate) fn is_filtered(&self) -> bool {
matches!(self.kind, DeckKind::Filtered(_))
}
}
/// State gathered once and threaded through a filtered-deck (re)build.
pub(crate) struct DeckFilterContext<'a> {
    /// The filtered deck cards are being moved into.
    pub target_deck: DeckID,
    /// The filtered deck's settings (search terms, reschedule flag, ...).
    pub config: &'a FilteredDeck,
    pub scheduler: SchedulerVersion,
    pub usn: Usn,
    /// Days elapsed since collection creation.
    pub today: u32,
}
impl Collection {
    /// Empty the filtered deck, returning all of its cards to their home
    /// decks, inside a transaction.
    pub fn empty_filtered_deck(&mut self, did: DeckID) -> Result<()> {
        self.transact(None, |col| col.return_all_cards_in_filtered_deck(did))
    }

    /// Send every card currently in deck `did` back to its home deck.
    /// Caller is responsible for the transaction.
    pub(super) fn return_all_cards_in_filtered_deck(&mut self, did: DeckID) -> Result<()> {
        let cids = self.storage.all_cards_in_single_deck(did)?;
        self.return_cards_to_home_deck(&cids)
    }

    // Unlike the old Python code, this also marks the cards as modified.
    fn return_cards_to_home_deck(&mut self, cids: &[CardID]) -> Result<()> {
        let sched = self.sched_ver();
        let usn = self.usn()?;
        for cid in cids {
            if let Some(mut card) = self.storage.get_card(*cid)? {
                let original = card.clone();
                card.remove_from_filtered_deck_restoring_queue(sched);
                self.update_card(&mut card, &original, usn)?;
            }
        }
        Ok(())
    }

    // Unlike the old Python code, this also marks the cards as modified.
    /// Empty the filtered deck, then refill it from its search terms.
    /// Returns the number of cards moved in. Errors if `did` is not a
    /// filtered deck.
    pub fn rebuild_filtered_deck(&mut self, did: DeckID) -> Result<u32> {
        let deck = self.get_deck(did)?.ok_or(AnkiError::NotFound)?;
        let config = if let DeckKind::Filtered(kind) = &deck.kind {
            kind
        } else {
            return Err(AnkiError::invalid_input("not filtered"));
        };
        let ctx = DeckFilterContext {
            target_deck: did,
            config,
            scheduler: self.sched_ver(),
            usn: self.usn()?,
            today: self.timing_today()?.days_elapsed,
        };

        self.transact(None, |col| {
            col.return_all_cards_in_filtered_deck(did)?;
            col.build_filtered_deck(ctx)
        })
    }

    /// Gather cards for each search term in turn, and return how many
    /// cards were moved into the deck in total.
    fn build_filtered_deck(&mut self, ctx: DeckFilterContext) -> Result<u32> {
        // NOTE(review): positions start at a large negative number —
        // presumably so filtered cards sort ahead of regular due numbers;
        // confirm against the queue-building code.
        let start = -100_000;
        let mut position = start;
        for term in &ctx.config.search_terms {
            position = self.move_cards_matching_term(&ctx, term, position)?;
        }

        Ok((position - start) as u32)
    }

    /// Move matching cards into filtered deck.
    /// Returns the new starting position.
    fn move_cards_matching_term(
        &mut self,
        ctx: &DeckFilterContext,
        term: &FilteredSearchTerm,
        mut position: i32,
    ) -> Result<i32> {
        // suspended/buried/already-filtered cards are never gathered;
        // v1 additionally excludes learning cards
        let search = format!(
            "{} -is:suspended -is:buried -deck:filtered {}",
            if term.search.trim().is_empty() {
                "".to_string()
            } else {
                format!("({})", term.search)
            },
            if ctx.scheduler == SchedulerVersion::V1 {
                "-is:learn"
            } else {
                ""
            }
        );
        let order = order_and_limit_for_search(term, ctx.today);

        self.search_cards_into_table(&search, SortMode::Custom(order))?;
        for mut card in self.storage.all_searched_cards()? {
            let original = card.clone();
            card.move_into_filtered_deck(ctx, position);
            self.update_card(&mut card, &original, ctx.usn)?;
            position += 1;
        }

        Ok(position)
    }
}
/// Build the "<order clause> limit <n>" SQL suffix for one filtered-deck
/// search term. `today` is days elapsed since collection creation, used by
/// the DuePriority ordering.
fn order_and_limit_for_search(term: &FilteredSearchTerm, today: u32) -> String {
    // owns the dynamically-built DuePriority clause, so every match arm can
    // yield a &str with the same lifetime
    let temp_string;
    let order = match term.order() {
        FilteredSearchOrder::OldestFirst => "(select max(id) from revlog where cid=c.id)",
        FilteredSearchOrder::Random => "random()",
        FilteredSearchOrder::IntervalsAscending => "ivl",
        FilteredSearchOrder::IntervalsDescending => "ivl desc",
        FilteredSearchOrder::Lapses => "lapses desc",
        FilteredSearchOrder::Added => "n.id",
        FilteredSearchOrder::ReverseAdded => "n.id desc",
        FilteredSearchOrder::Due => "c.due, c.ord",
        FilteredSearchOrder::DuePriority => {
            // due review cards are ranked by relative overdueness;
            // everything else is pushed behind them via a large offset
            temp_string = format!(
                "
(case when queue={rev_queue} and due <= {today}
then (ivl / cast({today}-due+0.001 as real)) else 100000+due end)",
                rev_queue = CardQueue::Review as i8,
                today = today
            );
            &temp_string
        }
    };
    format!("{} limit {}", order, term.limit)
}

View file

@ -13,6 +13,7 @@ pub mod dbcheck;
pub mod deckconf; pub mod deckconf;
pub mod decks; pub mod decks;
pub mod err; pub mod err;
pub mod filtered;
pub mod findreplace; pub mod findreplace;
pub mod i18n; pub mod i18n;
pub mod latex; pub mod latex;

View file

@ -455,7 +455,7 @@ fn normalize_and_maybe_rename_files<'a>(
} }
// normalize fname into NFC // normalize fname into NFC
let mut fname = normalize_to_nfc(media_ref.fname); let mut fname = normalize_to_nfc(&media_ref.fname_decoded);
// and look it up to see if it's been renamed // and look it up to see if it's been renamed
if let Some(new_name) = renamed.get(fname.as_ref()) { if let Some(new_name) = renamed.get(fname.as_ref()) {
fname = new_name.to_owned().into(); fname = new_name.to_owned().into();
@ -486,7 +486,13 @@ fn normalize_and_maybe_rename_files<'a>(
} }
fn rename_media_ref_in_field(field: &str, media_ref: &MediaRef, new_name: &str) -> String { fn rename_media_ref_in_field(field: &str, media_ref: &MediaRef, new_name: &str) -> String {
let updated_tag = media_ref.full_ref.replace(media_ref.fname, new_name); let new_name = if matches!(media_ref.fname_decoded, Cow::Owned(_)) {
// filename had quoted characters like &amp; - need to re-encode
htmlescape::encode_minimal(new_name)
} else {
new_name.into()
};
let updated_tag = media_ref.full_ref.replace(media_ref.fname, &new_name);
field.replace(media_ref.full_ref, &updated_tag) field.replace(media_ref.full_ref, &updated_tag)
} }
@ -522,6 +528,7 @@ pub(crate) mod test {
pub(crate) const MEDIACHECK_ANKI2: &[u8] = pub(crate) const MEDIACHECK_ANKI2: &[u8] =
include_bytes!("../../tests/support/mediacheck.anki2"); include_bytes!("../../tests/support/mediacheck.anki2");
use super::normalize_and_maybe_rename_files;
use crate::collection::{open_collection, Collection}; use crate::collection::{open_collection, Collection};
use crate::err::Result; use crate::err::Result;
use crate::i18n::I18n; use crate::i18n::I18n;
@ -530,7 +537,7 @@ pub(crate) mod test {
use crate::media::files::trash_folder; use crate::media::files::trash_folder;
use crate::media::MediaManager; use crate::media::MediaManager;
use std::path::Path; use std::path::Path;
use std::{fs, io}; use std::{collections::HashMap, fs, io};
use tempfile::{tempdir, TempDir}; use tempfile::{tempdir, TempDir};
fn common_setup() -> Result<(TempDir, MediaManager, Collection)> { fn common_setup() -> Result<(TempDir, MediaManager, Collection)> {
@ -730,4 +737,12 @@ Unused: unused.jpg
Ok(()) Ok(())
} }
#[test]
fn html_encoding() {
let field = "[sound:a &amp; b.mp3]";
let mut seen = Default::default();
normalize_and_maybe_rename_files(field, &HashMap::new(), &mut seen, Path::new("/tmp"));
assert!(seen.contains("a & b.mp3"));
}
} }

View file

@ -9,6 +9,7 @@ use crate::{
collection::Collection, collection::Collection,
err::Result, err::Result,
sched::cutoff::local_minutes_west_for_stamp, sched::cutoff::local_minutes_west_for_stamp,
timestamp::TimestampSecs,
}; };
impl Collection { impl Collection {
@ -79,6 +80,10 @@ impl Collection {
self.set_creation_mins_west(None)?; self.set_creation_mins_west(None)?;
} }
if s.scheduler_version != 1 {
self.set_local_mins_west(local_minutes_west_for_stamp(TimestampSecs::now().0))?;
}
// fixme: currently scheduler change unhandled // fixme: currently scheduler change unhandled
Ok(()) Ok(())
} }

View file

@ -42,6 +42,7 @@ pub enum RevlogReviewKind {
Review = 1, Review = 1,
Relearning = 2, Relearning = 2,
EarlyReview = 3, EarlyReview = 3,
Manual = 4,
} }
impl Default for RevlogReviewKind { impl Default for RevlogReviewKind {
@ -59,3 +60,40 @@ impl RevlogEntry {
}) as u32 }) as u32
} }
} }
impl Card {
    /// Interval to record as last_interval in a manually-scheduled revlog
    /// entry. Placeholder: (re)learning cards should eventually report the
    /// negative step delay in seconds instead (see commented-out code).
    fn last_interval_for_revlog_todo(&self) -> i32 {
        self.interval as i32
        // fixme: need to pass in delays for (re)learning
        // if let Some(delay) = self.current_learning_delay_seconds(&[]) {
        //     -(delay as i32)
        // } else {
        //     self.interval as i32
        // }
    }
}
impl Collection {
    /// Add a revlog entry recording a manual reschedule of `card` to
    /// `next_interval` days. Writes only the log entry; the card itself is
    /// not modified here.
    pub(crate) fn log_manually_scheduled_review(
        &mut self,
        card: &Card,
        usn: Usn,
        next_interval: u32,
    ) -> Result<()> {
        println!("fixme: learning last_interval");
        // let deck = self.get_deck(card.deck_id)?.ok_or(AnkiError::NotFound)?;
        let entry = RevlogEntry {
            id: TimestampMillis::now(),
            cid: card.id,
            usn,
            // no answer button is involved in a manual change
            button_chosen: 0,
            interval: next_interval as i32,
            last_interval: card.last_interval_for_revlog_todo(),
            ease_factor: card.ease_factor as u32,
            taken_millis: 0,
            review_kind: RevlogReviewKind::Manual,
        };
        self.storage.add_revlog_entry(&entry)
    }
}

View file

@ -7,6 +7,7 @@ use crate::{
collection::Collection, collection::Collection,
config::SchedulerVersion, config::SchedulerVersion,
err::Result, err::Result,
search::SortMode,
}; };
use super::cutoff::SchedTimingToday; use super::cutoff::SchedTimingToday;
@ -59,12 +60,12 @@ impl Collection {
/// Unbury cards from the previous day. /// Unbury cards from the previous day.
/// Done automatically, and does not mark the cards as modified. /// Done automatically, and does not mark the cards as modified.
fn unbury_on_day_rollover(&mut self) -> Result<()> { fn unbury_on_day_rollover(&mut self) -> Result<()> {
self.search_cards_into_table("is:buried")?; self.search_cards_into_table("is:buried", SortMode::NoOrder)?;
self.storage.for_each_card_in_search(|mut card| { self.storage.for_each_card_in_search(|mut card| {
card.restore_queue_after_bury_or_suspend(); card.restore_queue_after_bury_or_suspend();
self.storage.update_card(&card) self.storage.update_card(&card)
})?; })?;
self.clear_searched_cards() self.storage.clear_searched_cards_table()
} }
/// Unsuspend/unbury cards in search table, and clear it. /// Unsuspend/unbury cards in search table, and clear it.
@ -77,12 +78,12 @@ impl Collection {
self.update_card(&mut card, &original, usn)?; self.update_card(&mut card, &original, usn)?;
} }
} }
self.clear_searched_cards() self.storage.clear_searched_cards_table()
} }
pub fn unbury_or_unsuspend_cards(&mut self, cids: &[CardID]) -> Result<()> { pub fn unbury_or_unsuspend_cards(&mut self, cids: &[CardID]) -> Result<()> {
self.transact(None, |col| { self.transact(None, |col| {
col.set_search_table_to_card_ids(cids)?; col.storage.set_search_table_to_card_ids(cids)?;
col.unsuspend_or_unbury_searched_cards() col.unsuspend_or_unbury_searched_cards()
}) })
} }
@ -94,7 +95,7 @@ impl Collection {
UnburyDeckMode::SchedOnly => "is:buried-sibling", UnburyDeckMode::SchedOnly => "is:buried-sibling",
}; };
self.transact(None, |col| { self.transact(None, |col| {
col.search_cards_into_table(&format!("deck:current {}", search))?; col.search_cards_into_table(&format!("deck:current {}", search), SortMode::NoOrder)?;
col.unsuspend_or_unbury_searched_cards() col.unsuspend_or_unbury_searched_cards()
}) })
} }
@ -125,7 +126,7 @@ impl Collection {
}; };
if card.queue != desired_queue { if card.queue != desired_queue {
if sched == SchedulerVersion::V1 { if sched == SchedulerVersion::V1 {
card.return_home(sched); card.remove_from_filtered_deck_restoring_queue(sched);
card.remove_from_learning(); card.remove_from_learning();
} }
card.queue = desired_queue; card.queue = desired_queue;
@ -133,7 +134,7 @@ impl Collection {
} }
} }
self.clear_searched_cards() self.storage.clear_searched_cards_table()
} }
pub fn bury_or_suspend_cards( pub fn bury_or_suspend_cards(
@ -142,7 +143,7 @@ impl Collection {
mode: pb::bury_or_suspend_cards_in::Mode, mode: pb::bury_or_suspend_cards_in::Mode,
) -> Result<()> { ) -> Result<()> {
self.transact(None, |col| { self.transact(None, |col| {
col.set_search_table_to_card_ids(cids)?; col.storage.set_search_table_to_card_ids(cids)?;
col.bury_or_suspend_searched_cards(mode) col.bury_or_suspend_searched_cards(mode)
}) })
} }

View file

@ -0,0 +1,58 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
card::{Card, CardQueue, CardType},
deckconf::INITIAL_EASE_FACTOR,
};
impl Card {
    /// Remove the card from the (re)learning queue.
    /// This will reset cards in learning.
    /// Only used in the V1 scheduler.
    /// Unlike the legacy Python code, this sets the due# to 0 instead of
    /// one past the previous max due number.
    pub(crate) fn remove_from_learning(&mut self) {
        if !matches!(self.queue, CardQueue::Learn | CardQueue::DayLearn) {
            return;
        }

        if self.ctype == CardType::Review {
            // reviews are removed from relearning
            self.due = self.original_due;
            self.original_due = 0;
            self.queue = CardQueue::Review;
        } else {
            // other cards are reset to new
            self.ctype = CardType::New;
            self.queue = CardQueue::New;
            self.interval = 0;
            self.due = 0;
            self.original_due = 0;
            self.ease_factor = INITIAL_EASE_FACTOR;
        }
    }

    /// Total remaining learning steps (low component of the packed
    /// remaining_steps field).
    fn all_remaining_steps(&self) -> u32 {
        self.remaining_steps % 1000
    }

    /// Steps remaining today (high component of the packed field).
    #[allow(dead_code)]
    fn remaining_steps_today(&self) -> u32 {
        self.remaining_steps / 1000
    }

    /// Delay in seconds of the current learning step, or None if the card
    /// is not in the intraday learning queue. Out-of-range step counts
    /// (empty delay list, or remaining > delays.len()) fall back to 0.
    #[allow(dead_code)]
    pub(crate) fn current_learning_delay_seconds(&self, delays: &[u32]) -> Option<u32> {
        if self.queue == CardQueue::Learn {
            let remaining = self.all_remaining_steps();
            // With `remaining` steps left, the current step's delay is the
            // `remaining`th entry from the end. nth_back() is zero-based, so
            // subtract one; the previous `saturating_sub(0)` was a no-op and
            // skipped one step (off by one, e.g. remaining=1 fetched the
            // second-to-last delay instead of the last).
            delays
                .iter()
                .nth_back(remaining.saturating_sub(1) as usize)
                .or(Some(&0))
                .map(|n| n * 60)
        } else {
            None
        }
    }
}

View file

@ -8,6 +8,9 @@ use crate::{
pub mod bury_and_suspend; pub mod bury_and_suspend;
pub(crate) mod congrats; pub(crate) mod congrats;
pub mod cutoff; pub mod cutoff;
mod learning;
pub mod new;
mod reviews;
pub mod timespan; pub mod timespan;
use chrono::FixedOffset; use chrono::FixedOffset;

143
rslib/src/sched/new.rs Normal file
View file

@ -0,0 +1,143 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
card::{Card, CardID, CardQueue, CardType},
collection::Collection,
deckconf::INITIAL_EASE_FACTOR,
decks::DeckID,
err::Result,
notes::NoteID,
search::SortMode,
types::Usn,
};
use rand::seq::SliceRandom;
use std::collections::{HashMap, HashSet};
impl Card {
    /// Turn this card back into a new card at the given queue position,
    /// first pulling it out of any filtered deck.
    fn schedule_as_new(&mut self, position: u32) {
        self.remove_from_filtered_deck_before_reschedule();
        self.ctype = CardType::New;
        self.queue = CardQueue::New;
        self.due = position as i32;
        self.interval = 0;
        if self.ease_factor == 0 {
            // unlike the old Python code, we leave the ease factor alone
            // if it's already set
            self.ease_factor = INITIAL_EASE_FACTOR;
        }
    }

    /// If the card is new, change its position.
    fn set_new_position(&mut self, position: u32) {
        let is_new = self.queue == CardQueue::New && self.ctype == CardType::New;
        if is_new {
            self.due = position as i32;
        }
    }
}
/// Maps each note to the position its new cards should receive when
/// (re)sorting, so all siblings of a note share one position.
pub(crate) struct NewCardSorter {
    position: HashMap<NoteID, u32>,
}
impl NewCardSorter {
pub(crate) fn new(cards: &[Card], starting_from: u32, step: u32, random: bool) -> Self {
let nids: HashSet<_> = cards.iter().map(|c| c.note_id).collect();
let mut nids: Vec<_> = nids.into_iter().collect();
if random {
nids.shuffle(&mut rand::thread_rng());
} else {
nids.sort_unstable();
}
NewCardSorter {
position: nids
.into_iter()
.enumerate()
.map(|(i, nid)| (nid, ((i as u32) * step) + starting_from))
.collect(),
}
}
pub(crate) fn position(&self, card: &Card) -> u32 {
self.position
.get(&card.note_id)
.cloned()
.unwrap_or_default()
}
}
impl Collection {
    /// Make the given cards new again, assigning fresh positions from the
    /// next-card counter and logging a manual reschedule for each.
    pub fn reschedule_cards_as_new(&mut self, cids: &[CardID]) -> Result<()> {
        let usn = self.usn()?;
        let mut position = self.get_next_card_position();
        self.transact(None, |col| {
            col.storage.set_search_table_to_card_ids(cids)?;
            let cards = col.storage.all_searched_cards()?;
            for mut card in cards {
                let original = card.clone();
                // interval 0: card is becoming new
                col.log_manually_scheduled_review(&card, usn, 0)?;
                card.schedule_as_new(position);
                col.update_card(&mut card, &original, usn)?;
                position += 1;
            }
            col.set_next_card_position(position)?;
            col.storage.clear_searched_cards_table()?;
            Ok(())
        })
    }

    /// Reposition the new cards in `cids`: one position per note, spaced
    /// `step` apart from `starting_from`. `random` shuffles note order
    /// instead of sorting by note id; `shift` first pushes existing cards
    /// at/after `starting_from` out of the way.
    pub fn sort_cards(
        &mut self,
        cids: &[CardID],
        starting_from: u32,
        step: u32,
        random: bool,
        shift: bool,
    ) -> Result<()> {
        let usn = self.usn()?;
        self.transact(None, |col| {
            col.sort_cards_inner(cids, starting_from, step, random, shift, usn)
        })
    }

    /// Transaction body for sort_cards(); see its doc comment.
    fn sort_cards_inner(
        &mut self,
        cids: &[CardID],
        starting_from: u32,
        step: u32,
        random: bool,
        shift: bool,
        usn: Usn,
    ) -> Result<()> {
        if shift {
            self.shift_existing_cards(starting_from, step * cids.len() as u32, usn)?;
        }
        self.storage.set_search_table_to_card_ids(cids)?;
        let cards = self.storage.all_searched_cards()?;
        let sorter = NewCardSorter::new(&cards, starting_from, step, random);
        for mut card in cards {
            let original = card.clone();
            card.set_new_position(sorter.position(&card));
            self.update_card(&mut card, &original, usn)?;
        }
        self.storage.clear_searched_cards_table()
    }

    /// This creates a transaction - we probably want to split it out
    /// in the future if calling it as part of a deck options update.
    pub fn sort_deck(&mut self, deck: DeckID, random: bool) -> Result<()> {
        let cids = self.search_cards(&format!("did:{}", deck), SortMode::NoOrder)?;
        self.sort_cards(&cids, 1, 1, random, false)
    }

    /// Push existing new cards at or above position `start` up by `by`.
    fn shift_existing_cards(&mut self, start: u32, by: u32, usn: Usn) -> Result<()> {
        self.storage.search_cards_at_or_above_position(start)?;
        for mut card in self.storage.all_searched_cards()? {
            let original = card.clone();
            // NOTE(review): assumes matched cards have non-negative due —
            // confirm search_cards_at_or_above_position guarantees this
            card.set_new_position(card.due as u32 + by);
            self.update_card(&mut card, &original, usn)?;
        }
        Ok(())
    }
}

View file

@ -0,0 +1,51 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
card::{Card, CardID, CardQueue, CardType},
collection::Collection,
deckconf::INITIAL_EASE_FACTOR,
err::Result,
};
use rand::distributions::{Distribution, Uniform};
impl Card {
    /// Convert this card into a review card due `interval` days after
    /// `today`, first pulling it out of any filtered deck. The stored
    /// interval is clamped to at least 1, while the due date uses the raw
    /// `interval` value, matching the original behaviour.
    fn schedule_as_review(&mut self, interval: u32, today: u32) {
        self.remove_from_filtered_deck_before_reschedule();
        self.ctype = CardType::Review;
        self.queue = CardQueue::Review;
        self.interval = interval.max(1);
        self.due = (today + interval) as i32;
        if self.ease_factor == 0 {
            // keep an existing ease factor; only seed the default when unset
            self.ease_factor = INITIAL_EASE_FACTOR;
        }
    }
}
impl Collection {
    /// Turn the given cards into review cards with a random interval drawn
    /// uniformly from `min_days..=max_days`, logging a manual reschedule
    /// entry for each.
    ///
    /// NOTE(review): Uniform::from panics when min_days > max_days —
    /// confirm callers validate the range first.
    pub fn reschedule_cards_as_reviews(
        &mut self,
        cids: &[CardID],
        min_days: u32,
        max_days: u32,
    ) -> Result<()> {
        let usn = self.usn()?;
        let today = self.timing_today()?.days_elapsed;
        let mut rng = rand::thread_rng();
        let distribution = Uniform::from(min_days..=max_days);
        self.transact(None, |col| {
            col.storage.set_search_table_to_card_ids(cids)?;
            for mut card in col.storage.all_searched_cards()? {
                let original = card.clone();
                let interval = distribution.sample(&mut rng);
                // the logged interval is clamped to at least one day
                col.log_manually_scheduled_review(&card, usn, interval.max(1))?;
                card.schedule_as_review(interval, today);
                col.update_card(&mut card, &original, usn)?;
            }
            col.storage.clear_searched_cards_table()?;
            Ok(())
        })
    }
}

View file

@ -41,21 +41,6 @@ pub fn time_span(seconds: f32, i18n: &I18n, precise: bool) -> String {
i18n.trn(key, args) i18n.trn(key, args)
} }
// fixme: this doesn't belong here
pub fn studied_today(cards: usize, secs: f32, i18n: &I18n) -> String {
let span = Timespan::from_secs(secs).natural_span();
let amount = span.as_unit();
let unit = span.unit().as_str();
let secs_per = if cards > 0 {
secs / (cards as f32)
} else {
0.0
};
let args = tr_args!["amount" => amount, "unit" => unit,
"cards" => cards, "secs-per-card" => secs_per];
i18n.trn(TR::StatisticsStudiedToday, args)
}
const SECOND: f32 = 1.0; const SECOND: f32 = 1.0;
const MINUTE: f32 = 60.0 * SECOND; const MINUTE: f32 = 60.0 * SECOND;
const HOUR: f32 = 60.0 * MINUTE; const HOUR: f32 = 60.0 * MINUTE;
@ -64,7 +49,7 @@ const MONTH: f32 = 30.0 * DAY;
const YEAR: f32 = 12.0 * MONTH; const YEAR: f32 = 12.0 * MONTH;
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
enum TimespanUnit { pub(crate) enum TimespanUnit {
Seconds, Seconds,
Minutes, Minutes,
Hours, Hours,
@ -74,7 +59,7 @@ enum TimespanUnit {
} }
impl TimespanUnit { impl TimespanUnit {
fn as_str(self) -> &'static str { pub fn as_str(self) -> &'static str {
match self { match self {
TimespanUnit::Seconds => "seconds", TimespanUnit::Seconds => "seconds",
TimespanUnit::Minutes => "minutes", TimespanUnit::Minutes => "minutes",
@ -87,13 +72,13 @@ impl TimespanUnit {
} }
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
struct Timespan { pub(crate) struct Timespan {
seconds: f32, seconds: f32,
unit: TimespanUnit, unit: TimespanUnit,
} }
impl Timespan { impl Timespan {
fn from_secs(seconds: f32) -> Self { pub fn from_secs(seconds: f32) -> Self {
Timespan { Timespan {
seconds, seconds,
unit: TimespanUnit::Seconds, unit: TimespanUnit::Seconds,
@ -102,7 +87,7 @@ impl Timespan {
/// Return the value as the configured unit, eg seconds=70/unit=Minutes /// Return the value as the configured unit, eg seconds=70/unit=Minutes
/// returns 1.17 /// returns 1.17
fn as_unit(self) -> f32 { pub fn as_unit(self) -> f32 {
let s = self.seconds; let s = self.seconds;
match self.unit { match self.unit {
TimespanUnit::Seconds => s, TimespanUnit::Seconds => s,
@ -116,7 +101,7 @@ impl Timespan {
/// Round seconds and days to integers, otherwise /// Round seconds and days to integers, otherwise
/// truncates to one decimal place. /// truncates to one decimal place.
fn as_rounded_unit(self) -> f32 { pub fn as_rounded_unit(self) -> f32 {
match self.unit { match self.unit {
// seconds/days as integer // seconds/days as integer
TimespanUnit::Seconds | TimespanUnit::Days => self.as_unit().round(), TimespanUnit::Seconds | TimespanUnit::Days => self.as_unit().round(),
@ -125,13 +110,13 @@ impl Timespan {
} }
} }
fn unit(self) -> TimespanUnit { pub fn unit(self) -> TimespanUnit {
self.unit self.unit
} }
/// Return a new timespan in the most appropriate unit, eg /// Return a new timespan in the most appropriate unit, eg
/// 70 secs -> timespan in minutes /// 70 secs -> timespan in minutes
fn natural_span(self) -> Timespan { pub fn natural_span(self) -> Timespan {
let secs = self.seconds.abs(); let secs = self.seconds.abs();
let unit = if secs < MINUTE { let unit = if secs < MINUTE {
TimespanUnit::Seconds TimespanUnit::Seconds
@ -158,7 +143,7 @@ impl Timespan {
mod test { mod test {
use crate::i18n::I18n; use crate::i18n::I18n;
use crate::log; use crate::log;
use crate::sched::timespan::{answer_button_time, studied_today, time_span, MONTH}; use crate::sched::timespan::{answer_button_time, time_span, MONTH};
#[test] #[test]
fn answer_buttons() { fn answer_buttons() {
@ -180,15 +165,4 @@ mod test {
assert_eq!(time_span(45.0 * 86_400.0, &i18n, false), "1.5 months"); assert_eq!(time_span(45.0 * 86_400.0, &i18n, false), "1.5 months");
assert_eq!(time_span(365.0 * 86_400.0 * 1.5, &i18n, false), "1.5 years"); assert_eq!(time_span(365.0 * 86_400.0 * 1.5, &i18n, false), "1.5 years");
} }
#[test]
fn combo() {
// temporary test of fluent term handling
let log = log::terminal();
let i18n = I18n::new(&["zz"], "", log);
assert_eq!(
&studied_today(3, 13.0, &i18n).replace("\n", " "),
"Studied 3 cards in 13 seconds today (4.33s/card)"
);
}
} }

View file

@ -9,7 +9,6 @@ use crate::{
card::CardID, card::CardType, collection::Collection, config::SortKind, err::Result, card::CardID, card::CardType, collection::Collection, config::SortKind, err::Result,
search::parser::parse, search::parser::parse,
}; };
use rusqlite::NO_PARAMS;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum SortMode { pub enum SortMode {
@ -63,20 +62,7 @@ impl Collection {
let writer = SqlWriter::new(self); let writer = SqlWriter::new(self);
let (mut sql, args) = writer.build_cards_query(&top_node, mode.required_table())?; let (mut sql, args) = writer.build_cards_query(&top_node, mode.required_table())?;
self.add_order(&mut sql, mode)?;
match mode {
SortMode::NoOrder => (),
SortMode::FromConfig => unreachable!(),
SortMode::Builtin { kind, reverse } => {
prepare_sort(self, kind)?;
sql.push_str(" order by ");
write_order(&mut sql, kind, reverse)?;
}
SortMode::Custom(order_clause) => {
sql.push_str(" order by ");
sql.push_str(&order_clause);
}
}
let mut stmt = self.storage.db.prepare(&sql)?; let mut stmt = self.storage.db.prepare(&sql)?;
let ids: Vec<_> = stmt let ids: Vec<_> = stmt
@ -86,16 +72,33 @@ impl Collection {
Ok(ids) Ok(ids)
} }
fn add_order(&mut self, sql: &mut String, mode: SortMode) -> Result<()> {
match mode {
SortMode::NoOrder => (),
SortMode::FromConfig => unreachable!(),
SortMode::Builtin { kind, reverse } => {
prepare_sort(self, kind)?;
sql.push_str(" order by ");
write_order(sql, kind, reverse)?;
}
SortMode::Custom(order_clause) => {
sql.push_str(" order by ");
sql.push_str(&order_clause);
}
}
Ok(())
}
/// Place the matched card ids into a temporary 'search_cids' table /// Place the matched card ids into a temporary 'search_cids' table
/// instead of returning them. Use clear_searched_cards() to remove it. /// instead of returning them. Use clear_searched_cards() to remove it.
pub(crate) fn search_cards_into_table(&mut self, search: &str) -> Result<()> { pub(crate) fn search_cards_into_table(&mut self, search: &str, mode: SortMode) -> Result<()> {
let top_node = Node::Group(parse(search)?); let top_node = Node::Group(parse(search)?);
let writer = SqlWriter::new(self); let writer = SqlWriter::new(self);
let (sql, args) = writer.build_cards_query(&top_node, RequiredTable::Cards)?; let (mut sql, args) = writer.build_cards_query(&top_node, mode.required_table())?;
self.storage self.add_order(&mut sql, mode)?;
.db
.execute_batch(include_str!("search_cids_setup.sql"))?; self.storage.setup_searched_cards_table()?;
let sql = format!("insert into search_cids {}", sql); let sql = format!("insert into search_cids {}", sql);
self.storage.db.prepare(&sql)?.execute(&args)?; self.storage.db.prepare(&sql)?.execute(&args)?;
@ -103,31 +106,6 @@ impl Collection {
Ok(()) Ok(())
} }
/// Injects the provided card IDs into the search_cids table, for
/// when ids have arrived outside of a search.
/// Clear with clear_searched_cards().
pub(crate) fn set_search_table_to_card_ids(&mut self, cards: &[CardID]) -> Result<()> {
self.storage
.db
.execute_batch(include_str!("search_cids_setup.sql"))?;
let mut stmt = self
.storage
.db
.prepare_cached("insert into search_cids values (?)")?;
for cid in cards {
stmt.execute(&[cid])?;
}
Ok(())
}
pub(crate) fn clear_searched_cards(&self) -> Result<()> {
self.storage
.db
.execute("drop table if exists search_cids", NO_PARAMS)?;
Ok(())
}
/// If the sort mode is based on a config setting, look it up. /// If the sort mode is based on a config setting, look it up.
fn resolve_config_sort(&self, mode: &mut SortMode) { fn resolve_config_sort(&self, mode: &mut SortMode) {
if mode == &SortMode::FromConfig { if mode == &SortMode::FromConfig {

View file

@ -2,6 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{ use crate::{
decks::DeckID,
err::{AnkiError, Result}, err::{AnkiError, Result},
notetype::NoteTypeID, notetype::NoteTypeID,
}; };
@ -64,6 +65,7 @@ pub(super) enum SearchNode<'a> {
EditedInDays(u32), EditedInDays(u32),
CardTemplate(TemplateKind), CardTemplate(TemplateKind),
Deck(Cow<'a, str>), Deck(Cow<'a, str>),
DeckID(DeckID),
NoteTypeID(NoteTypeID), NoteTypeID(NoteTypeID),
NoteType(Cow<'a, str>), NoteType(Cow<'a, str>),
Rated { Rated {
@ -283,6 +285,7 @@ fn search_node_for_text_with_argument<'a>(
"mid" => SearchNode::NoteTypeID(val.parse()?), "mid" => SearchNode::NoteTypeID(val.parse()?),
"nid" => SearchNode::NoteIDs(check_id_list(val)?), "nid" => SearchNode::NoteIDs(check_id_list(val)?),
"cid" => SearchNode::CardIDs(check_id_list(val)?), "cid" => SearchNode::CardIDs(check_id_list(val)?),
"did" => SearchNode::DeckID(val.parse()?),
"card" => parse_template(val.as_ref()), "card" => parse_template(val.as_ref()),
"is" => parse_state(val.as_ref())?, "is" => parse_state(val.as_ref())?,
"flag" => parse_flag(val.as_ref())?, "flag" => parse_flag(val.as_ref())?,

View file

@ -140,6 +140,9 @@ impl SqlWriter<'_> {
SearchNode::NoteTypeID(ntid) => { SearchNode::NoteTypeID(ntid) => {
write!(self.sql, "n.mid = {}", ntid).unwrap(); write!(self.sql, "n.mid = {}", ntid).unwrap();
} }
SearchNode::DeckID(did) => {
write!(self.sql, "c.did = {}", did).unwrap();
}
SearchNode::NoteType(notetype) => self.write_note_type(&norm(notetype))?, SearchNode::NoteType(notetype) => self.write_note_type(&norm(notetype))?,
SearchNode::Rated { days, ease } => self.write_rated(*days, *ease)?, SearchNode::Rated { days, ease } => self.write_rated(*days, *ease)?,
SearchNode::Tag(tag) => self.write_tag(&norm(tag))?, SearchNode::Tag(tag) => self.write_tag(&norm(tag))?,
@ -505,6 +508,7 @@ impl SearchNode<'_> {
match self { match self {
SearchNode::AddedInDays(_) => RequiredTable::Cards, SearchNode::AddedInDays(_) => RequiredTable::Cards,
SearchNode::Deck(_) => RequiredTable::Cards, SearchNode::Deck(_) => RequiredTable::Cards,
SearchNode::DeckID(_) => RequiredTable::Cards,
SearchNode::Rated { .. } => RequiredTable::Cards, SearchNode::Rated { .. } => RequiredTable::Cards,
SearchNode::State(_) => RequiredTable::Cards, SearchNode::State(_) => RequiredTable::Cards,
SearchNode::Flag(_) => RequiredTable::Cards, SearchNode::Flag(_) => RequiredTable::Cards,

View file

@ -229,12 +229,14 @@ fn revlog_to_text(e: RevlogEntry, i18n: &I18n, offset: FixedOffset) -> RevlogTex
RevlogReviewKind::Review => i18n.tr(TR::CardStatsReviewLogTypeReview).into(), RevlogReviewKind::Review => i18n.tr(TR::CardStatsReviewLogTypeReview).into(),
RevlogReviewKind::Relearning => i18n.tr(TR::CardStatsReviewLogTypeRelearn).into(), RevlogReviewKind::Relearning => i18n.tr(TR::CardStatsReviewLogTypeRelearn).into(),
RevlogReviewKind::EarlyReview => i18n.tr(TR::CardStatsReviewLogTypeFiltered).into(), RevlogReviewKind::EarlyReview => i18n.tr(TR::CardStatsReviewLogTypeFiltered).into(),
RevlogReviewKind::Manual => i18n.tr(TR::CardStatsReviewLogTypeManual).into(),
}; };
let kind_class = match e.review_kind { let kind_class = match e.review_kind {
RevlogReviewKind::Learning => String::from("revlog-learn"), RevlogReviewKind::Learning => String::from("revlog-learn"),
RevlogReviewKind::Review => String::from("revlog-review"), RevlogReviewKind::Review => String::from("revlog-review"),
RevlogReviewKind::Relearning => String::from("revlog-relearn"), RevlogReviewKind::Relearning => String::from("revlog-relearn"),
RevlogReviewKind::EarlyReview => String::from("revlog-filtered"), RevlogReviewKind::EarlyReview => String::from("revlog-filtered"),
RevlogReviewKind::Manual => String::from("revlog-manual"),
}; };
let rating = e.button_chosen.to_string(); let rating = e.button_chosen.to_string();
let interval = if e.interval == 0 { let interval = if e.interval == 0 {

View file

@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{backend_proto as pb, prelude::*, revlog::RevlogEntry}; use crate::{backend_proto as pb, prelude::*, revlog::RevlogEntry, search::SortMode};
impl Collection { impl Collection {
pub(crate) fn graph_data_for_search( pub(crate) fn graph_data_for_search(
@ -9,7 +9,7 @@ impl Collection {
search: &str, search: &str,
days: u32, days: u32,
) -> Result<pb::GraphsOut> { ) -> Result<pb::GraphsOut> {
self.search_cards_into_table(search)?; self.search_cards_into_table(search, SortMode::NoOrder)?;
let all = search.trim().is_empty(); let all = search.trim().is_empty();
self.graph_data(all, days) self.graph_data(all, days)
} }
@ -33,7 +33,7 @@ impl Collection {
.get_revlog_entries_for_searched_cards(revlog_start)? .get_revlog_entries_for_searched_cards(revlog_start)?
}; };
self.clear_searched_cards()?; self.storage.clear_searched_cards_table()?;
Ok(pb::GraphsOut { Ok(pb::GraphsOut {
cards: cards.into_iter().map(Into::into).collect(), cards: cards.into_iter().map(Into::into).collect(),

View file

@ -3,3 +3,6 @@
mod card; mod card;
mod graphs; mod graphs;
mod today;
pub use today::studied_today;

45
rslib/src/stats/today.rs Normal file
View file

@ -0,0 +1,45 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{i18n::I18n, prelude::*, sched::timespan::Timespan};
pub fn studied_today(cards: u32, secs: f32, i18n: &I18n) -> String {
let span = Timespan::from_secs(secs).natural_span();
let amount = span.as_unit();
let unit = span.unit().as_str();
let secs_per = if cards > 0 {
secs / (cards as f32)
} else {
0.0
};
let args = tr_args!["amount" => amount, "unit" => unit,
"cards" => cards, "secs-per-card" => secs_per];
i18n.trn(TR::StatisticsStudiedToday, args)
}
impl Collection {
pub fn studied_today(&self) -> Result<String> {
let today = self
.storage
.studied_today(self.timing_today()?.next_day_at)?;
Ok(studied_today(today.cards, today.seconds as f32, &self.i18n))
}
}
#[cfg(test)]
mod test {
use super::studied_today;
use crate::i18n::I18n;
use crate::log;
#[test]
fn today() {
// temporary test of fluent term handling
let log = log::terminal();
let i18n = I18n::new(&["zz"], "", log);
assert_eq!(
&studied_today(3, 13.0, &i18n).replace("\n", " "),
"Studied 3 cards in 13 seconds today (4.33s/card)"
);
}
}

View file

@ -0,0 +1,5 @@
insert into search_cids
select id
from cards
where due >= ?
and type = ?

View file

@ -315,6 +315,41 @@ impl super::SqliteStorage {
.next() .next()
.unwrap() .unwrap()
} }
pub(crate) fn search_cards_at_or_above_position(&self, start: u32) -> Result<()> {
self.setup_searched_cards_table()?;
self.db
.prepare(include_str!("at_or_above_position.sql"))?
.execute(&[start, CardType::New as u32])?;
Ok(())
}
pub(crate) fn setup_searched_cards_table(&self) -> Result<()> {
self.db
.execute_batch(include_str!("search_cids_setup.sql"))?;
Ok(())
}
pub(crate) fn clear_searched_cards_table(&self) -> Result<()> {
self.db
.execute("drop table if exists search_cids", NO_PARAMS)?;
Ok(())
}
/// Injects the provided card IDs into the search_cids table, for
/// when ids have arrived outside of a search.
/// Clear with clear_searched_cards().
pub(crate) fn set_search_table_to_card_ids(&mut self, cards: &[CardID]) -> Result<()> {
self.setup_searched_cards_table()?;
let mut stmt = self
.db
.prepare_cached("insert into search_cids values (?)")?;
for cid in cards {
stmt.execute(&[cid])?;
}
Ok(())
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -10,5 +10,25 @@ insert
time, time,
type type
) )
values values (
(?, ?, ?, ?, ?, ?, ?, ?, ?) (
case
when ?1 in (
select id
from revlog
) then (
select max(id) + 1
from revlog
)
else ?1
end
),
?,
?,
?,
?,
?,
?,
?,
?
)

View file

@ -15,6 +15,11 @@ use rusqlite::{
}; };
use std::convert::TryFrom; use std::convert::TryFrom;
pub(crate) struct StudiedToday {
pub cards: u32,
pub seconds: f64,
}
impl FromSql for RevlogReviewKind { impl FromSql for RevlogReviewKind {
fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> { fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value { if let ValueRef::Integer(i) = value {
@ -113,4 +118,19 @@ impl SqliteStorage {
})? })?
.collect() .collect()
} }
pub(crate) fn studied_today(&self, day_cutoff: i64) -> Result<StudiedToday> {
let start = (day_cutoff - 86_400) * 1_000;
self.db
.prepare_cached(include_str!("studied_today.sql"))?
.query_map(&[start, RevlogReviewKind::Manual as i64], |row| {
Ok(StudiedToday {
cards: row.get(0)?,
seconds: row.get(1)?,
})
})?
.next()
.unwrap()
.map_err(Into::into)
}
} }

View file

@ -0,0 +1,5 @@
select count(),
coalesce(sum(time) / 1000.0, 0.0)
from revlog
where id > ?
and type != ?

View file

@ -142,28 +142,36 @@ pub fn extract_av_tags<'a>(text: &'a str, question_side: bool) -> (Cow<'a, str>,
pub(crate) struct MediaRef<'a> { pub(crate) struct MediaRef<'a> {
pub full_ref: &'a str, pub full_ref: &'a str,
pub fname: &'a str, pub fname: &'a str,
/// audio files may have things like &amp; that need decoding
pub fname_decoded: Cow<'a, str>,
} }
pub(crate) fn extract_media_refs(text: &str) -> Vec<MediaRef> { pub(crate) fn extract_media_refs(text: &str) -> Vec<MediaRef> {
let mut out = vec![]; let mut out = vec![];
for caps in IMG_TAG.captures_iter(text) { for caps in IMG_TAG.captures_iter(text) {
let fname = caps
.get(1)
.or_else(|| caps.get(2))
.or_else(|| caps.get(3))
.unwrap()
.as_str();
let fname_decoded = fname.into();
out.push(MediaRef { out.push(MediaRef {
full_ref: caps.get(0).unwrap().as_str(), full_ref: caps.get(0).unwrap().as_str(),
fname: caps fname,
.get(1) fname_decoded,
.or_else(|| caps.get(2))
.or_else(|| caps.get(3))
.unwrap()
.as_str(),
}); });
} }
for caps in AV_TAGS.captures_iter(text) { for caps in AV_TAGS.captures_iter(text) {
if let Some(m) = caps.get(1) { if let Some(m) = caps.get(1) {
let fname = m.as_str();
let fname_decoded = decode_entities(fname);
out.push(MediaRef { out.push(MediaRef {
full_ref: caps.get(0).unwrap().as_str(), full_ref: caps.get(0).unwrap().as_str(),
fname: m.as_str(), fname,
fname_decoded,
}); });
} }
} }

View file

@ -15,6 +15,10 @@ impl TimestampSecs {
Self(elapsed().as_secs() as i64) Self(elapsed().as_secs() as i64)
} }
pub fn zero() -> Self {
Self(0)
}
pub fn elapsed_secs(self) -> u64 { pub fn elapsed_secs(self) -> u64 {
(Self::now().0 - self.0).max(0) as u64 (Self::now().0 - self.0).max(0) as u64
} }
@ -30,6 +34,10 @@ impl TimestampMillis {
Self(elapsed().as_millis() as i64) Self(elapsed().as_millis() as i64)
} }
pub fn zero() -> Self {
Self(0)
}
pub fn as_secs(self) -> TimestampSecs { pub fn as_secs(self) -> TimestampSecs {
TimestampSecs(self.0 / 1000) TimestampSecs(self.0 / 1000)
} }

View file

@ -11,7 +11,7 @@ readme = "README.md"
anki = { path = "../rslib" } anki = { path = "../rslib" }
[dependencies.pyo3] [dependencies.pyo3]
version = "0.8.0" version = "0.11.0"
features = ["extension-module"] features = ["extension-module"]
[lib] [lib]

View file

@ -1 +1 @@
nightly-2020-06-25 stable

View file

@ -61,10 +61,10 @@ fn want_release_gil(method: u32) -> bool {
#[pymethods] #[pymethods]
impl Backend { impl Backend {
fn command(&mut self, py: Python, method: u32, input: &PyBytes) -> PyResult<PyObject> { fn command(&self, py: Python, method: u32, input: &PyBytes) -> PyResult<PyObject> {
let in_bytes = input.as_bytes(); let in_bytes = input.as_bytes();
if want_release_gil(method) { if want_release_gil(method) {
py.allow_threads(move || self.backend.run_command_bytes(method, in_bytes)) py.allow_threads(|| self.backend.run_command_bytes(method, in_bytes))
} else { } else {
self.backend.run_command_bytes(method, in_bytes) self.backend.run_command_bytes(method, in_bytes)
} }
@ -77,9 +77,9 @@ impl Backend {
/// This takes and returns JSON, due to Python's slow protobuf /// This takes and returns JSON, due to Python's slow protobuf
/// encoding/decoding. /// encoding/decoding.
fn db_command(&mut self, py: Python, input: &PyBytes) -> PyResult<PyObject> { fn db_command(&self, py: Python, input: &PyBytes) -> PyResult<PyObject> {
let in_bytes = input.as_bytes(); let in_bytes = input.as_bytes();
let out_res = py.allow_threads(move || { let out_res = py.allow_threads(|| {
self.backend self.backend
.run_db_command_bytes(in_bytes) .run_db_command_bytes(in_bytes)
.map_err(BackendError::py_err) .map_err(BackendError::py_err)

View file

@ -59,9 +59,11 @@
</p> </p>
{/if} {/if}
<p> {#if !info.isFilteredDeck}
{@html customStudyMsg} <p>
</p> {@html customStudyMsg}
</p>
{/if}
{/if} {/if}
</div> </div>
</div> </div>

View file

@ -47,6 +47,10 @@ export function gatherData(data: pb.BackendProto.GraphsOut): GraphData {
const empty = { mature: 0, young: 0, learn: 0, relearn: 0, early: 0 }; const empty = { mature: 0, young: 0, learn: 0, relearn: 0, early: 0 };
for (const review of data.revlog as pb.BackendProto.RevlogEntry[]) { for (const review of data.revlog as pb.BackendProto.RevlogEntry[]) {
if (review.reviewKind == ReviewKind.MANUAL) {
// don't count days with only manual scheduling
continue;
}
const day = Math.ceil( const day = Math.ceil(
((review.id as number) / 1000 - data.nextDayAtSecs) / 86400 ((review.id as number) / 1000 - data.nextDayAtSecs) / 86400
); );

View file

@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import pb from "../backend/proto"; import pb, { BackendProto } from "../backend/proto";
import { studiedToday } from "../time"; import { studiedToday } from "../time";
import { I18n } from "../i18n"; import { I18n } from "../i18n";
@ -30,6 +30,10 @@ export function gatherData(data: pb.BackendProto.GraphsOut, i18n: I18n): TodayDa
continue; continue;
} }
if (review.reviewKind == ReviewKind.MANUAL) {
continue;
}
// total // total
answerCount += 1; answerCount += 1;
answerMillis += review.takenMillis; answerMillis += review.takenMillis;