Merge branch 'main' into ally-fix-border-ratio-too-low

Author: GithubAnon0000, 2025-08-30 20:36:01 +00:00 (committed by GitHub)
Commit: 54b1ca1074
No known key found for this signature in database (GPG key ID: B5690EEEBB952194)
106 changed files with 4354 additions and 4078 deletions


@ -1 +1 @@
25.08b1
25.08b5


@ -234,6 +234,11 @@ Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <https://github.com/leedoughty>
********************

Cargo.lock (generated)

@ -130,7 +130,8 @@ dependencies = [
"prost",
"prost-reflect",
"pulldown-cmark 0.13.0",
"rand 0.9.1",
"rand 0.9.2",
"rayon",
"regex",
"reqwest 0.12.20",
"rusqlite",
@ -143,7 +144,7 @@ dependencies = [
"serde_tuple",
"sha1",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
"syn 2.0.103",
"tempfile",
"tokio",
@ -219,7 +220,7 @@ dependencies = [
"prost-types",
"serde",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
]
[[package]]
@ -705,7 +706,7 @@ dependencies = [
"log",
"num-traits",
"portable-atomic-util",
"rand 0.9.1",
"rand 0.9.2",
"rmp-serde",
"serde",
"serde_json",
@ -731,7 +732,7 @@ dependencies = [
"hashbrown 0.15.4",
"log",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"serde",
"spin 0.10.0",
"text_placeholder",
@ -761,12 +762,12 @@ dependencies = [
"csv",
"derive-new 0.7.0",
"dirs 6.0.0",
"rand 0.9.1",
"rand 0.9.2",
"rmp-serde",
"sanitize-filename 0.6.0",
"serde",
"serde_json",
"strum 0.27.1",
"strum 0.27.2",
"tempfile",
"thiserror 2.0.12",
]
@ -816,7 +817,7 @@ dependencies = [
"num-traits",
"paste",
"portable-atomic-util",
"rand 0.9.1",
"rand 0.9.2",
"seq-macro",
"spin 0.10.0",
]
@ -864,7 +865,7 @@ dependencies = [
"half",
"hashbrown 0.15.4",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"serde",
"serde_bytes",
@ -958,7 +959,7 @@ dependencies = [
"memmap2",
"num-traits",
"num_cpus",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"rayon",
"safetensors",
@ -1402,7 +1403,7 @@ dependencies = [
"log",
"num-traits",
"portable-atomic",
"rand 0.9.1",
"rand 0.9.2",
"sanitize-filename 0.5.0",
"serde",
"serde_json",
@ -2213,20 +2214,20 @@ dependencies = [
[[package]]
name = "fsrs"
version = "4.1.1"
version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1f3a8c3df2c324ebab71461178fe8c1fe2d7373cf603f312b652befd026f06d"
checksum = "04954cc67c3c11ee342a2ee1f5222bf76d73f7772df08d37dc9a6cdd73c467eb"
dependencies = [
"burn",
"itertools 0.14.0",
"log",
"ndarray",
"priority-queue",
"rand 0.9.1",
"rand 0.9.2",
"rayon",
"serde",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
]
[[package]]
@ -2803,7 +2804,7 @@ dependencies = [
"cfg-if",
"crunchy",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"serde",
]
@ -3657,7 +3658,7 @@ dependencies = [
"linkcheck",
"regex",
"reqwest 0.12.20",
"strum 0.27.1",
"strum 0.27.2",
"tokio",
]
@ -3958,6 +3959,7 @@ dependencies = [
"anki_process",
"anyhow",
"camino",
"serde_json",
"walkdir",
"which",
]
@ -5095,7 +5097,7 @@ dependencies = [
"bytes",
"getrandom 0.3.3",
"lru-slab",
"rand 0.9.1",
"rand 0.9.2",
"ring",
"rustc-hash 2.1.1",
"rustls",
@ -5149,9 +5151,9 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.1"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
@ -5202,7 +5204,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463"
dependencies = [
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
]
[[package]]
@ -5973,9 +5975,9 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "slab"
version = "0.4.10"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d"
checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
[[package]]
name = "slotmap"
@ -6112,9 +6114,9 @@ dependencies = [
[[package]]
name = "strum"
version = "0.27.1"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
"strum_macros 0.27.1",
]


@ -33,9 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
[workspace.dependencies.fsrs]
version = "4.1.1"
version = "5.1.0"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"
[workspace.dependencies]
@ -110,6 +109,7 @@ prost-types = "0.13"
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@ -141,7 +141,7 @@ walkdir = "2.5.0"
which = "8.0.0"
widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }


@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
pub fn check_rust(build: &mut Build) -> Result<()> {
let inputs = inputs![
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
"Cargo.lock",
"Cargo.toml",
"rust-toolchain.toml",


@ -32,10 +32,19 @@ pub fn setup_pyenv(args: PyenvArgs) {
}
}
let mut command = Command::new(args.uv_bin);
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
command.env_remove(key);
}
}
run_command(
Command::new(args.uv_bin)
command
.env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
.args(["sync", "--locked"])
.args(["sync", "--locked", "--no-config"])
.args(args.extra_args),
);
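The change above strips inherited uv configuration (UV_* variables and VIRTUAL_ENV) and adds --no-config, so a user's own uv settings cannot leak into the build's managed environment. A minimal Python sketch of the same idea, with an illustrative target path rather than the real one:

import os
import subprocess

# Drop uv-related variables inherited from the caller's shell.
env = {k: v for k, v in os.environ.items()
       if not k.startswith("UV_") and k != "VIRTUAL_ENV"}
env["UV_PROJECT_ENVIRONMENT"] = "/path/to/pyenv"  # illustrative venv location
subprocess.run(["uv", "sync", "--locked", "--no-config"], env=env, check=True)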

File diff suppressed because it is too large.

@ -1 +1 @@
Subproject commit 3d04bcbf7fefca0007bc9db307409d88210995d8
Subproject commit a599715d3c27ff2eb895c749f3534ab73d83dad1


@ -5,6 +5,11 @@ database-check-card-properties =
[one] Fixed { $count } invalid card property.
*[other] Fixed { $count } invalid card properties.
}
database-check-card-last-review-time-empty =
{ $count ->
[one] Added last review time to { $count } card.
*[other] Added last review time to { $count } cards.
}
database-check-missing-templates =
{ $count ->
[one] Deleted { $count } card with missing template.


@ -505,7 +505,9 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation
@ -519,6 +521,9 @@ deck-config-save-options-to-preset-confirm = Overwrite the options in your curre
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card
## Messages related to the FSRS schedulers health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.
@ -528,7 +533,7 @@ deck-config-health-check = Check health when optimizing
deck-config-fsrs-bad-fit-warning = Health Check:
Your memory is difficult for FSRS to predict. Recommendations:
- Suspend or reformulate leeches.
- Suspend or reformulate any cards you constantly forget.
- Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
- Understand before you memorize.
@ -539,6 +544,7 @@ deck-config-fsrs-good-fit = Health Check:
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
{ $days ->
[one] A 100 day interval will become { $days } day.


@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text
preferences-default-search-text-example = eg. 'deck:current '
preferences-default-search-text-example = e.g. "deck:current"
preferences-theme = Theme
preferences-theme-follow-system = Follow System
preferences-theme-light = Light


@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} memorized
statistics-memorized = {$memorized} cards memorized
statistics-today-title = Today
statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards
## Retention rate represents your actual retention rate from past reviews, in
## Retention represents your actual retention from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention rate is the percentage of all reviewed cards
## future retention. Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.
@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.
statistics-true-retention-title = Retention rate
statistics-true-retention-title = Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail

@ -1 +1 @@
Subproject commit c65a9587b1f18931986bdf145872e8e4c44c5c82
Subproject commit bb4207f3b8e9a7c428db282d12c75b850be532f3


@ -40,12 +40,10 @@ message DeckConfigId {
message GetRetentionWorkloadRequest {
repeated float w = 1;
string search = 2;
float before = 3;
float after = 4;
}
message GetRetentionWorkloadResponse {
float factor = 1;
map<uint32, float> costs = 1;
}
message GetIgnoredBeforeCountRequest {
@ -219,6 +217,8 @@ message DeckConfigsForUpdate {
bool review_today_active = 5;
// Whether new_today applies to today or a past day.
bool new_today_active = 6;
// Deck-specific desired retention override
optional float desired_retention = 7;
}
string name = 1;
int64 config_id = 2;


@ -83,6 +83,8 @@ message Deck {
optional uint32 new_limit = 7;
DayLimit review_limit_today = 8;
DayLimit new_limit_today = 9;
// Deck-specific desired retention override
optional float desired_retention = 10;
reserved 12 to 15;
}


@ -55,6 +55,8 @@ service SchedulerService {
returns (ComputeOptimalRetentionResponse);
rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
returns (SimulateFsrsReviewResponse);
rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
returns (SimulateFsrsWorkloadResponse);
rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
returns (EvaluateParamsResponse);
@ -404,6 +406,9 @@ message SimulateFsrsReviewRequest {
repeated float easy_days_percentages = 10;
deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
optional uint32 suspend_after_lapse_count = 12;
float historical_retention = 13;
uint32 learning_step_count = 14;
uint32 relearning_step_count = 15;
}
message SimulateFsrsReviewResponse {
@ -413,6 +418,12 @@ message SimulateFsrsReviewResponse {
repeated float daily_time_cost = 4;
}
message SimulateFsrsWorkloadResponse {
map<uint32, float> cost = 1;
map<uint32, float> memorized = 2;
map<uint32, uint32> review_count = 3;
}
message ComputeOptimalRetentionResponse {
float optimal_retention = 1;
}
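The new SimulateFsrsWorkload RPC reuses SimulateFsrsReviewRequest and returns three maps, apparently keyed by desired retention expressed as a percentage. A small Python sketch of how a caller could derive the "Time / Memorized Ratio" shown by the new simulator mode; the keys, values, and units below are illustrative assumptions, not RPC output:

# Stand-ins for SimulateFsrsWorkloadResponse.cost and .memorized.
cost = {70: 820.0, 80: 910.0, 90: 1150.0}      # assumed review time per desired retention
memorized = {70: 410.0, 80: 455.0, 90: 500.0}  # assumed cards memorized per desired retention

# Time spent per memorized card; lower means a cheaper desired retention.
ratio = {dr: cost[dr] / memorized[dr] for dr in cost if memorized.get(dr)}
best = min(ratio, key=ratio.get)
print(f"cheapest desired retention: {best}% ({ratio[best]:.2f} per memorized card)")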


@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
return BackendError(err.message, help_page, context, backtrace)
elif val == kind.SEARCH_ERROR:
return SearchError(markdown(err.message), help_page, context, backtrace)
return SearchError(err.message, help_page, context, backtrace)
elif val == kind.UNDO_EMPTY:
return UndoEmpty(err.message, help_page, context, backtrace)


@ -133,6 +133,7 @@ class Card(DeprecatedNamesMixin):
memory_state=self.memory_state,
desired_retention=self.desired_retention,
decay=self.decay,
last_review_time_secs=self.last_review_time,
)
@deprecated(info="please use col.update_card()")


@ -176,7 +176,7 @@ class MnemoFact:
try:
fact_view = self.cards[0].fact_view_id
except IndexError as err:
raise Exception(f"Fact {id} has no cards") from err
raise Exception(f"Fact {self.id} has no cards") from err
if fact_view.startswith("1.") or fact_view.startswith("1::"):
return FrontOnly
@ -187,7 +187,7 @@ class MnemoFact:
elif fact_view.startswith("5.1"):
return Cloze
raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")
def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]


@ -73,7 +73,7 @@ langs = sorted(
("ଓଡ଼ିଆ", "or_OR"),
("Filipino", "tl"),
("ئۇيغۇر", "ug"),
("Oʻzbek", "uz_UZ"),
("Oʻzbekcha", "uz_UZ"),
]
)


@ -7,7 +7,7 @@ dependencies = [
"decorator",
"markdown",
"orjson",
"protobuf>=4.21",
"protobuf>=6.0,<8.0",
"requests[socks]",
# remove after we update to min python 3.11+
"typing_extensions",


@ -70,10 +70,10 @@ def show(mw: aqt.AnkiQt) -> QDialog:
abouttext += f"<p>{lede}"
abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
abouttext += ("Python %s Qt %s PyQt %s<br>") % (
abouttext += ("Python %s Qt %s Chromium %s<br>") % (
platform.python_version(),
qVersion(),
PYQT_VERSION_STR,
(qWebEngineChromiumVersion() or "").split(".")[0],
)
abouttext += (
without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite))
@ -225,6 +225,7 @@ def show(mw: aqt.AnkiQt) -> QDialog:
"Adnane Taghi",
"Anon_0000",
"Bilolbek Normuminov",
"Sagiv Marzini",
)
)


@ -10,6 +10,8 @@ import re
from collections.abc import Callable, Sequence
from typing import Any, cast
from markdown import markdown
import aqt
import aqt.browser
import aqt.editor
@ -20,7 +22,7 @@ from anki.cards import Card, CardId
from anki.collection import Collection, Config, OpChanges, SearchNode
from anki.consts import *
from anki.decks import DeckId
from anki.errors import NotFoundError
from anki.errors import NotFoundError, SearchError
from anki.lang import without_unicode_isolation
from anki.models import NotetypeId
from anki.notes import NoteId
@ -498,6 +500,8 @@ class Browser(QMainWindow):
text = self.current_search()
try:
normed = self.col.build_search_string(text)
except SearchError as err:
showWarning(markdown(str(err)))
except Exception as err:
showWarning(str(err))
else:


@ -51,6 +51,7 @@ class CardInfoDialog(QDialog):
def _setup_ui(self, card_id: CardId | None) -> None:
self.mw.garbage_collect_on_dialog_finish(self)
self.setMinimumSize(400, 300)
disable_help_button(self)
restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800))
add_close_shortcut(self)

Binary image file (727 B) not shown.

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
xmlns="http://www.w3.org/2000/svg">
<g id="Layer-1" transform="translate(0.5,0.5)">
<rect x="0" y="0" width="20" height="20" fill="none"/>
<g transform="translate(14.8974,6.3648)">
<path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
fill="black" fill-rule="nonzero"/>
</g>
<g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
<ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
fill="none" stroke="black" stroke-width="0.25"/>
</g>
<g transform="translate(3.1987,14.4958)">
<path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
</g>
<g transform="matrix(-1,0,0,1,20.573,18.613)">
<rect x="5.387" y="0.601" width="9.799" height="0.185"
fill="none" stroke="black" stroke-width="2"/>
</g>
<g transform="matrix(-1,0,0,1,20.741,13.51)">
<rect x="9.899" y="1.163" width="0.943" height="4.164"
fill="none" stroke="black" stroke-width="2"/>
</g>
</g>
</svg>


@ -1292,9 +1292,10 @@
<tabstop>daily_backups</tabstop>
<tabstop>weekly_backups</tabstop>
<tabstop>monthly_backups</tabstop>
<tabstop>tabWidget</tabstop>
<tabstop>syncAnkiHubLogout</tabstop>
<tabstop>syncAnkiHubLogin</tabstop>
<tabstop>buttonBox</tabstop>
<tabstop>tabWidget</tabstop>
</tabstops>
<resources/>
<connections>


@ -483,7 +483,7 @@ def update_deck_configs() -> bytes:
update.abort = True
def on_success(changes: OpChanges) -> None:
if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog):
window.reject()
def handle_on_main() -> None:
@ -511,7 +511,7 @@ def set_scheduling_states() -> bytes:
def import_done() -> bytes:
def update_window_modality() -> None:
if window := aqt.mw.app.activeWindow():
if window := aqt.mw.app.activeModalWidget():
from aqt.import_export.import_dialog import ImportDialog
if isinstance(window, ImportDialog):
@ -529,7 +529,7 @@ def import_request(endpoint: str) -> bytes:
response.ParseFromString(output)
def handle_on_main() -> None:
window = aqt.mw.app.activeWindow()
window = aqt.mw.app.activeModalWidget()
on_op_finished(aqt.mw, response, window)
aqt.mw.taskman.run_on_main(handle_on_main)
@ -569,7 +569,7 @@ def change_notetype() -> bytes:
data = request.data
def handle_on_main() -> None:
window = aqt.mw.app.activeWindow()
window = aqt.mw.app.activeModalWidget()
if isinstance(window, ChangeNotetypeDialog):
window.save(data)
@ -579,7 +579,7 @@ def change_notetype() -> bytes:
def deck_options_require_close() -> bytes:
def handle_on_main() -> None:
window = aqt.mw.app.activeWindow()
window = aqt.mw.app.activeModalWidget()
if isinstance(window, DeckOptionsDialog):
window.require_close()
@ -591,7 +591,7 @@ def deck_options_require_close() -> bytes:
def deck_options_ready() -> bytes:
def handle_on_main() -> None:
window = aqt.mw.app.activeWindow()
window = aqt.mw.app.activeModalWidget()
if isinstance(window, DeckOptionsDialog):
window.set_ready()
@ -654,6 +654,7 @@ exposed_backend_list = [
"evaluate_params_legacy",
"get_optimal_retention_parameters",
"simulate_fsrs_review",
"simulate_fsrs_workload",
# DeckConfigService
"get_ignored_before_count",
"get_retention_workload",


@ -124,17 +124,14 @@ def launcher_executable() -> str | None:
def trigger_launcher_run() -> None:
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
"""Create a trigger file to request launcher UI on next run."""
try:
root = launcher_root()
if not root:
return
pyproject_path = Path(root) / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
except Exception as e:
print(e)
@ -150,6 +147,7 @@ def update_and_restart() -> None:
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
# fixes a bug where launcher fails to appear if opening it
# straight after updating
if "GNOME_TERMINAL_SCREEN" in env:
@ -159,12 +157,15 @@ def update_and_restart() -> None:
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
# On Windows 10, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
stdin=io,
stdout=io,
stderr=io,
env=env,
creationflags=creationflags,
)
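This hunk (and the near-identical one further below) replaces the old "touch pyproject.toml" heuristic with an explicit, ephemeral .want-launcher marker that the Rust launcher deletes after reading. A minimal Python sketch of the handshake, using the Linux data directory from qt/mac/update-launcher-env as an assumed launcher root:

from pathlib import Path

root = Path.home() / ".local/share/AnkiProgramFiles"  # assumed launcher_root() on Linux
trigger = root / ".want-launcher"

# aqt side: request the launcher menu on the next run.
root.mkdir(parents=True, exist_ok=True)
trigger.touch()

# launcher side (implemented in Rust further below): honour the request once, then clear it.
if trigger.exists():
    trigger.unlink(missing_ok=True)
    # ...show the launcher menu instead of starting Anki directly...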


@ -82,11 +82,14 @@ class Preferences(QDialog):
)
group = self.form.preferences_answer_keys
group.setLayout(layout := QFormLayout())
tab_widget: QWidget = self.form.url_schemes
for ease, label in ease_labels:
layout.addRow(
label,
line_edit := QLineEdit(self.mw.pm.get_answer_key(ease) or ""),
)
QWidget.setTabOrder(tab_widget, line_edit)
tab_widget = line_edit
qconnect(
line_edit.textChanged,
functools.partial(self.mw.pm.set_answer_key, ease),


@ -633,7 +633,7 @@ class QtAudioInputRecorder(Recorder):
from PyQt6.QtMultimedia import QAudioFormat, QAudioSource # type: ignore
format = QAudioFormat()
format.setChannelCount(1)
format.setChannelCount(2)
format.setSampleRate(44100)
format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
@ -743,7 +743,8 @@ class RecordDialog(QDialog):
def _setup_dialog(self) -> None:
self.setWindowTitle("Anki")
icon = QLabel()
icon.setPixmap(QPixmap("icons:media-record.png"))
qicon = QIcon("icons:media-record.svg")
icon.setPixmap(qicon.pixmap(60, 60))
self.label = QLabel("...")
hbox = QHBoxLayout()
hbox.addWidget(icon)


@ -177,9 +177,13 @@ class CustomStyles:
QPushButton:default {{
border: 1px solid {tm.var(colors.BORDER_FOCUS)};
}}
QPushButton {{
margin: 1px;
}}
QPushButton:focus {{
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
outline: none;
margin: 0px;
}}
QPushButton:hover,
QTabBar::tab:hover,


@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
elif isinstance(err, Interrupted):
# no message to show
return
show_warning(str(err))
show_warning(str(err), parent=mw)
def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
if out.new_endpoint:
mw.pm.set_current_sync_url(out.new_endpoint)
if out.server_message:
showText(out.server_message)
showText(out.server_message, parent=mw)
if out.required == out.NO_CHANGES:
tooltip(parent=mw, msg=tr.sync_collection_complete())
# all done; track media progress


@ -226,29 +226,45 @@ def ask_user_dialog(
)
def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
def show_info(
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox:
"Show a small info window with an OK button."
if "icon" not in kwargs:
kwargs["icon"] = QMessageBox.Icon.Information
return MessageBox(
text,
callback=(lambda _: callback()) if callback is not None else None,
parent=parent,
**kwargs,
)
def show_warning(
text: str, callback: Callable | None = None, **kwargs: Any
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox:
"Show a small warning window with an OK button."
return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
return show_info(
text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
)
def show_critical(
text: str, callback: Callable | None = None, **kwargs: Any
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox:
"Show a small critical error window with an OK button."
return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
return show_info(
text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
)
def showWarning(


@ -69,17 +69,14 @@ def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
def trigger_launcher_run() -> None:
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
"""Create a trigger file to request launcher UI on next run."""
try:
root = launcher_root()
if not root:
return
pyproject_path = Path(root) / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
except Exception as e:
print(e)
@ -93,17 +90,21 @@ def update_and_restart() -> None:
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
creationflags = 0
if sys.platform == "win32":
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
# On Windows, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
stdin=io,
stdout=io,
stderr=io,
env=env,
creationflags=creationflags,
)


@ -13,7 +13,8 @@ HOST_ARCH=$(uname -m)
# Define output paths
OUTPUT_DIR="../../../out/launcher"
LAUNCHER_DIR="$OUTPUT_DIR/anki-linux"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux"
# Clean existing output directory
rm -rf "$LAUNCHER_DIR"
@ -77,8 +78,8 @@ chmod +x \
chmod -R a+r "$LAUNCHER_DIR"
ZSTD="zstd -c --long -T0 -18"
TRANSFORM="s%^.%anki-linux%S"
TARBALL="$OUTPUT_DIR/anki-linux.tar.zst"
TRANSFORM="s%^.%anki-launcher-$ANKI_VERSION-linux%S"
TARBALL="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux.tar.zst"
tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" .


@ -5,7 +5,7 @@
<key>CFBundleDisplayName</key>
<string>Anki</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<string>ANKI_VERSION</string>
<key>LSMinimumSystemVersion</key>
<string>12</string>
<key>LSApplicationCategoryType</key>


@ -31,25 +31,26 @@ lipo -create \
cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"
# Copy support files
cp Info.plist "$APP_LAUNCHER/Contents/"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/"
cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/"
cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/"
cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
# Codesign
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
# Codesign/bundle
if [ -z "$NODMG" ]; then
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
--entitlements entitlements.python.xml \
"$i"
done
done
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Notarize and bundle (skip if NODMG is set)
if [ -z "$NODMG" ]; then
# Notarize and build dmg
./notarize.sh "$OUTPUT_DIR"
./dmg/build.sh "$OUTPUT_DIR"
fi


@ -6,7 +6,8 @@ set -e
# base folder with Anki.app in it
output="$1"
dist="$1/tmp"
dmg_path="$output/Anki.dmg"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
dmg_path="$output/anki-launcher-$ANKI_VERSION-mac.dmg"
if [ -d "/Volumes/Anki" ]
then


@ -22,6 +22,11 @@ const NSIS_PATH: &str = "C:\\Program Files (x86)\\NSIS\\makensis.exe";
fn main() -> Result<()> {
println!("Building Windows launcher...");
// Read version early so it can be used throughout the build process
let version = std::fs::read_to_string("../../../.version")?
.trim()
.to_string();
let output_dir = PathBuf::from(OUTPUT_DIR);
let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR);
let nsis_dir = PathBuf::from(NSIS_DIR);
@ -31,16 +36,20 @@ fn main() -> Result<()> {
extract_nsis_plugins()?;
copy_files(&output_dir)?;
sign_binaries(&output_dir)?;
copy_nsis_files(&nsis_dir)?;
copy_nsis_files(&nsis_dir, &version)?;
build_uninstaller(&output_dir, &nsis_dir)?;
sign_file(&output_dir.join("uninstall.exe"))?;
generate_install_manifest(&output_dir)?;
build_installer(&output_dir, &nsis_dir)?;
sign_file(&PathBuf::from("../../../out/launcher_exe/anki-install.exe"))?;
let installer_filename = format!("anki-launcher-{version}-windows.exe");
let installer_path = PathBuf::from("../../../out/launcher_exe").join(&installer_filename);
sign_file(&installer_path)?;
println!("Build completed successfully!");
println!("Output directory: {}", output_dir.display());
println!("Installer: ../../../out/launcher_exe/anki-install.exe");
println!("Installer: ../../../out/launcher_exe/{installer_filename}");
Ok(())
}
@ -235,11 +244,13 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> {
Ok(())
}
fn copy_nsis_files(nsis_dir: &Path) -> Result<()> {
fn copy_nsis_files(nsis_dir: &Path, version: &str) -> Result<()> {
println!("Copying NSIS support files...");
// Copy anki.template.nsi as anki.nsi
copy_file("anki.template.nsi", nsis_dir.join("anki.nsi"))?;
// Copy anki.template.nsi as anki.nsi and substitute version placeholders
let template_content = std::fs::read_to_string("anki.template.nsi")?;
let substituted_content = template_content.replace("ANKI_VERSION", version);
write_file(nsis_dir.join("anki.nsi"), substituted_content)?;
// Copy fileassoc.nsh
copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?;


@ -11,7 +11,6 @@ use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use anki_io::copy_file;
use anki_io::copy_if_newer;
use anki_io::create_dir_all;
use anki_io::modified_time;
use anki_io::read_file;
@ -46,8 +45,12 @@ struct State {
dist_python_version_path: std::path::PathBuf,
uv_lock_path: std::path::PathBuf,
sync_complete_marker: std::path::PathBuf,
launcher_trigger_file: std::path::PathBuf,
mirror_path: std::path::PathBuf,
pyproject_modified_by_user: bool,
previous_version: Option<String>,
resources_dir: std::path::PathBuf,
venv_folder: std::path::PathBuf,
}
#[derive(Debug, Clone)]
@ -69,8 +72,8 @@ pub enum MainMenuChoice {
Version(VersionKind),
ToggleBetas,
ToggleCache,
DownloadMirror,
Uninstall,
Quit,
}
fn main() {
@ -106,8 +109,12 @@ fn run() -> Result<()> {
dist_python_version_path: resources_dir.join(".python-version"),
uv_lock_path: uv_install_root.join("uv.lock"),
sync_complete_marker: uv_install_root.join(".sync_complete"),
launcher_trigger_file: uv_install_root.join(".want-launcher"),
mirror_path: uv_install_root.join("mirror"),
pyproject_modified_by_user: false, // calculated later
previous_version: None,
resources_dir,
venv_folder: uv_install_root.join(".venv"),
};
// Check for uninstall request from Windows uninstaller
@ -117,23 +124,19 @@ fn run() -> Result<()> {
return Ok(());
}
// Create install directory and copy project files in
// Create install directory
create_dir_all(&state.uv_install_root)?;
copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?;
copy_if_newer(
&state.dist_python_version_path,
&state.user_python_version_path,
)?;
let pyproject_has_changed = !state.sync_complete_marker.exists() || {
let pyproject_toml_time = modified_time(&state.user_pyproject_path)?;
let sync_complete_time = modified_time(&state.sync_complete_marker)?;
Ok::<bool, anyhow::Error>(pyproject_toml_time > sync_complete_time)
}
.unwrap_or(true);
let launcher_requested =
state.launcher_trigger_file.exists() || !state.user_pyproject_path.exists();
if !pyproject_has_changed {
// If venv is already up to date, launch Anki normally
// Calculate whether user has custom edits that need syncing
let pyproject_time = file_timestamp_secs(&state.user_pyproject_path);
let sync_time = file_timestamp_secs(&state.sync_complete_marker);
state.pyproject_modified_by_user = pyproject_time > sync_time;
let pyproject_has_changed = state.pyproject_modified_by_user;
if !launcher_requested && !pyproject_has_changed {
// If no launcher request and venv is already up to date, launch Anki normally
let args: Vec<String> = std::env::args().skip(1).collect();
let cmd = build_python_command(&state, &args)?;
launch_anki_normally(cmd)?;
@ -143,6 +146,11 @@ fn run() -> Result<()> {
// If we weren't in a terminal, respawn ourselves in one
ensure_terminal_shown()?;
if launcher_requested {
// Remove the trigger file to make request ephemeral
let _ = remove_file(&state.launcher_trigger_file);
}
print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
println!("\x1B[1mAnki Launcher\x1B[0m\n");
@ -150,15 +158,10 @@ fn run() -> Result<()> {
check_versions(&mut state);
let first_run = !state.uv_install_root.join(".venv").exists();
if first_run {
handle_version_install_or_update(&state, MainMenuChoice::Latest)?;
} else {
main_menu_loop(&state)?;
}
// Write marker file to indicate we've completed the sync process
write_sync_marker(&state.sync_complete_marker)?;
write_sync_marker(&state)?;
#[cfg(target_os = "macos")]
{
@ -184,12 +187,15 @@ fn run() -> Result<()> {
Ok(())
}
fn extract_aqt_version(
uv_path: &std::path::Path,
uv_install_root: &std::path::Path,
) -> Option<String> {
let output = Command::new(uv_path)
.current_dir(uv_install_root)
fn extract_aqt_version(state: &State) -> Option<String> {
// Check if .venv exists first
if !state.venv_folder.exists() {
return None;
}
let output = Command::new(&state.uv_path)
.current_dir(&state.uv_install_root)
.env("VIRTUAL_ENV", &state.venv_folder)
.args(["pip", "show", "aqt"])
.output()
.ok()?;
@ -214,7 +220,7 @@ fn check_versions(state: &mut State) {
}
// Determine current version by invoking uv pip show aqt
match extract_aqt_version(&state.uv_path, &state.uv_install_root) {
match extract_aqt_version(state) {
Some(version) => {
state.current_version = Some(version);
}
@ -239,12 +245,12 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
update_pyproject_for_version(choice.clone(), state)?;
// Extract current version before syncing (but don't write to file yet)
let previous_version_to_save = extract_aqt_version(&state.uv_path, &state.uv_install_root);
let previous_version_to_save = extract_aqt_version(state);
// Remove sync marker before attempting sync
let _ = remove_file(&state.sync_complete_marker);
println!("\x1B[1mUpdating Anki...\x1B[0m\n");
println!("Updating Anki...\n");
let python_version_trimmed = if state.user_python_version_path.exists() {
let python_version = read_file(&state.user_python_version_path)?;
@ -255,29 +261,44 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
None
};
// `uv sync` sometimes does not pull in Python automatically
// This might be system/platform specific and/or a uv bug.
let mut command = Command::new(&state.uv_path);
command
.current_dir(&state.uv_install_root)
.env("UV_CACHE_DIR", &state.uv_cache_dir)
.env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
.args(["python", "install", "--managed-python"]);
// Add python version if .python-version file exists
if let Some(version) = &python_version_trimmed {
command.args([version]);
let have_venv = state.venv_folder.exists();
if cfg!(target_os = "macos") && !have_developer_tools() && !have_venv {
println!("If you see a pop-up about 'install_name_tool', you can cancel it, and ignore the warning below.\n");
}
command.ensure_success().context("Python install failed")?;
// Sync the venv
// Prepare to sync the venv
let mut command = Command::new(&state.uv_path);
command.current_dir(&state.uv_install_root);
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
command.env_remove(key);
}
}
// remove CONDA_PREFIX/bin from PATH to avoid conda interference
#[cfg(target_os = "macos")]
if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
if let Ok(current_path) = std::env::var("PATH") {
let conda_bin = format!("{conda_prefix}/bin");
let filtered_paths: Vec<&str> = current_path
.split(':')
.filter(|&path| path != conda_bin)
.collect();
let new_path = filtered_paths.join(":");
command.env("PATH", new_path);
}
}
command
.current_dir(&state.uv_install_root)
.env("UV_CACHE_DIR", &state.uv_cache_dir)
.env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
.args(["sync", "--upgrade", "--managed-python"]);
.env(
"UV_HTTP_TIMEOUT",
std::env::var("UV_HTTP_TIMEOUT").unwrap_or_else(|_| "180".to_string()),
)
.args(["sync", "--upgrade", "--managed-python", "--no-config"]);
// Add python version if .python-version file exists
if let Some(version) = &python_version_trimmed {
@ -292,7 +313,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
Ok(_) => {
// Sync succeeded
if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) {
inject_helper_addon(&state.uv_install_root)?;
inject_helper_addon()?;
}
// Now that sync succeeded, save the previous version
@ -321,9 +342,11 @@ fn main_menu_loop(state: &State) -> Result<()> {
let menu_choice = get_main_menu_choice(state)?;
match menu_choice {
MainMenuChoice::Quit => std::process::exit(0),
MainMenuChoice::KeepExisting => {
// Skip sync, just launch existing installation
if state.pyproject_modified_by_user {
// User has custom edits, sync them
handle_version_install_or_update(state, MainMenuChoice::KeepExisting)?;
}
break;
}
MainMenuChoice::ToggleBetas => {
@ -354,6 +377,11 @@ fn main_menu_loop(state: &State) -> Result<()> {
println!();
continue;
}
MainMenuChoice::DownloadMirror => {
show_mirror_submenu(state)?;
println!();
continue;
}
MainMenuChoice::Uninstall => {
if handle_uninstall(state)? {
std::process::exit(0);
@ -361,9 +389,7 @@ fn main_menu_loop(state: &State) -> Result<()> {
continue;
}
choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => {
if handle_version_install_or_update(state, choice.clone()).is_err() {
continue;
}
handle_version_install_or_update(state, choice.clone())?;
break;
}
}
@ -371,23 +397,37 @@ fn main_menu_loop(state: &State) -> Result<()> {
Ok(())
}
fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> {
fn write_sync_marker(state: &State) -> Result<()> {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.context("Failed to get system time")?
.as_secs();
write_file(sync_complete_marker, timestamp.to_string())?;
write_file(&state.sync_complete_marker, timestamp.to_string())?;
Ok(())
}
/// Get mtime of provided file, or 0 if unavailable
fn file_timestamp_secs(path: &std::path::Path) -> i64 {
modified_time(path)
.map(|t| t.duration_since(UNIX_EPOCH).unwrap_or_default().as_secs() as i64)
.unwrap_or_default()
}
fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
loop {
println!("1) Latest Anki (press Enter)");
println!("2) Choose a version");
if let Some(current_version) = &state.current_version {
let normalized_current = normalize_version(current_version);
if state.pyproject_modified_by_user {
println!("3) Sync project changes");
} else {
println!("3) Keep existing version ({normalized_current})");
}
}
if let Some(prev_version) = &state.previous_version {
if state.current_version.as_ref() != Some(prev_version) {
let normalized_prev = normalize_version(prev_version);
@ -406,9 +446,13 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
"6) Cache downloads: {}",
if cache_enabled { "on" } else { "off" }
);
let mirror_enabled = is_mirror_enabled(state);
println!(
"7) Download mirror: {}",
if mirror_enabled { "on" } else { "off" }
);
println!();
println!("7) Uninstall");
println!("8) Quit");
println!("8) Uninstall");
print!("> ");
let _ = stdout().flush();
@ -447,8 +491,8 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
}
"5" => MainMenuChoice::ToggleBetas,
"6" => MainMenuChoice::ToggleCache,
"7" => MainMenuChoice::Uninstall,
"8" => MainMenuChoice::Quit,
"7" => MainMenuChoice::DownloadMirror,
"8" => MainMenuChoice::Uninstall,
_ => {
println!("Invalid input. Please try again.");
continue;
@ -458,8 +502,6 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
}
fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
println!("Please wait...");
let releases = get_releases(state)?;
let releases_str = releases
.latest
@ -618,15 +660,32 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
let mut cmd = Command::new(&state.uv_path);
cmd.current_dir(&state.uv_install_root)
.args(["run", "--no-project"])
.arg(&versions_script);
.args(["run", "--no-project", "--no-config", "--managed-python"])
.args(["--with", "pip-system-certs,requests[socks]"]);
let output = cmd.utf8_output()?;
let python_version = read_file(&state.dist_python_version_path)?;
let python_version_str =
String::from_utf8(python_version).context("Invalid UTF-8 in .python-version")?;
let version_trimmed = python_version_str.trim();
if !version_trimmed.is_empty() {
cmd.args(["--python", version_trimmed]);
}
cmd.arg(&versions_script);
let output = match cmd.utf8_output() {
Ok(output) => output,
Err(e) => {
print!("Unable to check for Anki versions. Please check your internet connection.\n\n");
return Err(e.into());
}
};
let versions = serde_json::from_str(&output.stdout).context("Failed to parse versions JSON")?;
Ok(versions)
}
fn get_releases(state: &State) -> Result<Releases> {
println!("Checking for updates...");
let include_prereleases = state.prerelease_marker.exists();
let all_versions = fetch_versions(state)?;
let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
@ -666,7 +725,15 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
&format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
),
};
write_file(&state.user_pyproject_path, &updated_content)?;
// Add mirror configuration if enabled
let final_content = if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
format!("{updated_content}\n\n[[tool.uv.index]]\nname = \"mirror\"\nurl = \"{pypi_mirror}\"\ndefault = true\n\n[tool.uv]\npython-install-mirror = \"{python_mirror}\"\n")
} else {
updated_content
};
write_file(&state.user_pyproject_path, &final_content)?;
// Update .python-version based on version kind
match version_kind {
@ -700,15 +767,15 @@ fn update_pyproject_for_version(menu_choice: MainMenuChoice, state: &State) -> R
MainMenuChoice::ToggleCache => {
unreachable!();
}
MainMenuChoice::DownloadMirror => {
unreachable!();
}
MainMenuChoice::Uninstall => {
unreachable!();
}
MainMenuChoice::Version(version_kind) => {
apply_version_kind(&version_kind, state)?;
}
MainMenuChoice::Quit => {
std::process::exit(0);
}
}
Ok(())
}
@ -756,7 +823,7 @@ fn parse_version_kind(version: &str) -> Option<VersionKind> {
}
}
fn inject_helper_addon(_uv_install_root: &std::path::Path) -> Result<()> {
fn inject_helper_addon() -> Result<()> {
let addons21_path = get_anki_addons21_path()?;
if !addons21_path.exists() {
@ -858,16 +925,24 @@ fn handle_uninstall(state: &State) -> Result<bool> {
Ok(true)
}
fn have_developer_tools() -> bool {
Command::new("xcode-select")
.args(["-p"])
.output()
.map(|output| output.status.success())
.unwrap_or(false)
}
fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
let python_exe = if cfg!(target_os = "windows") {
let show_console = std::env::var("ANKI_CONSOLE").is_ok();
if show_console {
state.uv_install_root.join(".venv/Scripts/python.exe")
state.venv_folder.join("Scripts/python.exe")
} else {
state.uv_install_root.join(".venv/Scripts/pythonw.exe")
state.venv_folder.join("Scripts/pythonw.exe")
}
} else {
state.uv_install_root.join(".venv/bin/python")
state.venv_folder.join("bin/python")
};
let mut cmd = Command::new(&python_exe);
@ -884,6 +959,70 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
Ok(cmd)
}
fn is_mirror_enabled(state: &State) -> bool {
state.mirror_path.exists()
}
fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
if !state.mirror_path.exists() {
return Ok(None);
}
let content = read_file(&state.mirror_path)?;
let content_str = String::from_utf8(content).context("Invalid UTF-8 in mirror file")?;
let lines: Vec<&str> = content_str.lines().collect();
if lines.len() >= 2 {
Ok(Some((
lines[0].trim().to_string(),
lines[1].trim().to_string(),
)))
} else {
Ok(None)
}
}
fn show_mirror_submenu(state: &State) -> Result<()> {
loop {
println!("Download mirror options:");
println!("1) No mirror");
println!("2) China");
print!("> ");
let _ = stdout().flush();
let mut input = String::new();
let _ = stdin().read_line(&mut input);
let input = input.trim();
match input {
"1" => {
// Remove mirror file
if state.mirror_path.exists() {
let _ = remove_file(&state.mirror_path);
}
println!("Mirror disabled.");
break;
}
"2" => {
// Write China mirror URLs
let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
write_file(&state.mirror_path, china_mirrors)?;
println!("China mirror enabled.");
break;
}
"" => {
// Empty input - return to main menu
break;
}
_ => {
println!("Invalid input. Please try again.");
continue;
}
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -116,8 +116,9 @@ pub use windows::ensure_terminal_shown;
pub fn ensure_terminal_shown() -> Result<()> {
use std::io::IsTerminal;
let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
if !stdout_is_terminal {
if want_terminal || !stdout_is_terminal {
#[cfg(target_os = "macos")]
mac::relaunch_in_terminal()?;
#[cfg(not(target_os = "macos"))]


@ -8,6 +8,7 @@ use anyhow::Context;
use anyhow::Result;
use widestring::u16cstr;
use windows::core::PCWSTR;
use windows::Wdk::System::SystemServices::RtlGetVersion;
use windows::Win32::System::Console::AttachConsole;
use windows::Win32::System::Console::GetConsoleWindow;
use windows::Win32::System::Console::ATTACH_PARENT_PROCESS;
@ -18,8 +19,25 @@ use windows::Win32::System::Registry::HKEY;
use windows::Win32::System::Registry::HKEY_CURRENT_USER;
use windows::Win32::System::Registry::KEY_READ;
use windows::Win32::System::Registry::REG_SZ;
use windows::Win32::System::SystemInformation::OSVERSIONINFOW;
use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID;
/// Returns true if running on Windows 10 (not Windows 11)
fn is_windows_10() -> bool {
unsafe {
let mut info = OSVERSIONINFOW {
dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
..Default::default()
};
if RtlGetVersion(&mut info).is_ok() {
// Windows 10 has build numbers < 22000, Windows 11 >= 22000
info.dwBuildNumber < 22000 && info.dwMajorVersion == 10
} else {
false
}
}
}
pub fn ensure_terminal_shown() -> Result<()> {
unsafe {
if !GetConsoleWindow().is_invalid() {
@ -29,6 +47,14 @@ pub fn ensure_terminal_shown() -> Result<()> {
}
if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() {
// This black magic triggers Windows to switch to the new
// ANSI-supporting console host, which is usually only available
// when the app is built with the console subsystem.
// Only needed on Windows 10, not Windows 11.
if is_windows_10() {
let _ = Command::new("cmd").args(["/C", ""]).status();
}
// Successfully attached to parent console
reconnect_stdio_to_console();
return Ok(());
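The check above keys off the documented 22000 build cutoff: Windows 11 still reports major version 10, but with build numbers of 22000 or higher. A rough Python equivalent for comparison (illustrative only, not part of the launcher):

import sys

def is_windows_10() -> bool:
    # Windows 11 reports major version 10 with build >= 22000.
    if sys.platform != "win32":
        return False
    ver = sys.getwindowsversion()
    return ver.major == 10 and ver.build < 22000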


@ -3,7 +3,11 @@
import json
import sys
import urllib.request
import pip_system_certs.wrapt_requests
import requests
pip_system_certs.wrapt_requests.inject_truststore()
def main():
@ -11,8 +15,9 @@ def main():
url = "https://pypi.org/pypi/aqt/json"
try:
with urllib.request.urlopen(url, timeout=30) as response:
data = json.loads(response.read().decode("utf-8"))
response = requests.get(url, timeout=30)
response.raise_for_status()
data = response.json()
releases = data.get("releases", {})
# Create list of (version, upload_time) tuples


@ -24,7 +24,7 @@ Name "Anki"
Unicode true
; The file to write (relative to nsis directory)
OutFile "..\launcher_exe\anki-install.exe"
OutFile "..\launcher_exe\anki-launcher-ANKI_VERSION-windows.exe"
; Non elevated
RequestExecutionLevel user
@ -214,7 +214,7 @@ Section ""
; Write the uninstall keys for Windows
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki Launcher"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "1.0.0"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "ANKI_VERSION"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"'
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S'
WriteRegDWORD HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1


@ -33,6 +33,12 @@ class _MacOSHelper:
"On completion, file should be saved if no error has arrived."
self._dll.end_wav_record()
def disable_appnap(self) -> None:
self._dll.disable_appnap()
def enable_appnap(self) -> None:
self._dll.enable_appnap()
# this must not be overwritten or deallocated
@CFUNCTYPE(None, c_char_p) # type: ignore

qt/mac/appnap.swift (new file)

@ -0,0 +1,25 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import Foundation
private var currentActivity: NSObjectProtocol?
@_cdecl("disable_appnap")
public func disableAppNap() {
// No-op if already assigned
guard currentActivity == nil else { return }
currentActivity = ProcessInfo.processInfo.beginActivity(
options: .userInitiatedAllowingIdleSystemSleep,
reason: "AppNap is disabled"
)
}
@_cdecl("enable_appnap")
public func enableAppNap() {
guard let activity = currentActivity else { return }
ProcessInfo.processInfo.endActivity(activity)
currentActivity = nil
}


@ -15,6 +15,7 @@ echo "Building macOS helper dylib..."
# Create the wheel using uv
echo "Creating wheel..."
cd "$SCRIPT_DIR"
rm -rf dist
"$PROJ_ROOT/out/extracted/uv/uv" build --wheel
echo "Build complete!"


@ -1,8 +1,6 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import os
import platform
import subprocess
import sys
from pathlib import Path


@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "anki-mac-helper"
version = "0.1.0"
version = "0.1.1"
description = "Small support library for Anki on Macs"
requires-python = ">=3.9"
license = { text = "AGPL-3.0-or-later" }

qt/mac/update-launcher-env (new executable file)

@ -0,0 +1,14 @@
#!/bin/bash
#
# Build and install into the launcher venv
set -e
./build.sh
if [[ "$OSTYPE" == "darwin"* ]]; then
export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv
else
export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv
fi
../../out/extracted/uv/uv pip install dist/*.whl


@ -12,7 +12,7 @@ dependencies = [
"send2trash",
"waitress>=2.0.0",
"pywin32; sys.platform == 'win32'",
"anki-mac-helper; sys.platform == 'darwin'",
"anki-mac-helper>=0.1.1; sys.platform == 'darwin'",
"pip-system-certs!=5.1",
"pyqt6>=6.2",
"pyqt6-webengine>=6.2",
@ -40,8 +40,8 @@ qt67 = [
qt = [
"pyqt6==6.9.1",
"pyqt6-qt6==6.9.1",
"pyqt6-webengine==6.9.0",
"pyqt6-webengine-qt6==6.9.1",
"pyqt6-webengine==6.8.0",
"pyqt6-webengine-qt6==6.8.2",
"pyqt6_sip==13.10.2",
]
qt68 = [


@ -81,6 +81,7 @@ pin-project.workspace = true
prost.workspace = true
pulldown-cmark.workspace = true
rand.workspace = true
rayon.workspace = true
regex.workspace = true
reqwest.workspace = true
rusqlite.workspace = true


@ -11,6 +11,24 @@ use snafu::ensure;
use snafu::ResultExt;
use snafu::Snafu;
#[derive(Debug)]
pub struct CodeDisplay(Option<i32>);
impl std::fmt::Display for CodeDisplay {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0 {
Some(code) => write!(f, "{code}"),
None => write!(f, "?"),
}
}
}
impl From<Option<i32>> for CodeDisplay {
fn from(code: Option<i32>) -> Self {
CodeDisplay(code)
}
}
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Failed to execute: {cmdline}"))]
@ -18,8 +36,15 @@ pub enum Error {
cmdline: String,
source: std::io::Error,
},
#[snafu(display("Failed with code {code:?}: {cmdline}"))]
ReturnedError { cmdline: String, code: Option<i32> },
#[snafu(display("Failed to run ({code}): {cmdline}"))]
ReturnedError { cmdline: String, code: CodeDisplay },
#[snafu(display("Failed to run ({code}): {cmdline}: {stdout}{stderr}"))]
ReturnedWithOutputError {
cmdline: String,
code: CodeDisplay,
stdout: String,
stderr: String,
},
#[snafu(display("Couldn't decode stdout/stderr as utf8"))]
InvalidUtf8 {
cmdline: String,
@ -71,31 +96,36 @@ impl CommandExt for Command {
status.success(),
ReturnedSnafu {
cmdline: get_cmdline(self),
code: status.code(),
code: CodeDisplay::from(status.code()),
}
);
Ok(self)
}
fn utf8_output(&mut self) -> Result<Utf8Output> {
let cmdline = get_cmdline(self);
let output = self.output().with_context(|_| DidNotExecuteSnafu {
cmdline: get_cmdline(self),
cmdline: cmdline.clone(),
})?;
let stdout = String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
cmdline: cmdline.clone(),
})?;
let stderr = String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
cmdline: cmdline.clone(),
})?;
ensure!(
output.status.success(),
ReturnedSnafu {
cmdline: get_cmdline(self),
code: output.status.code(),
ReturnedWithOutputSnafu {
cmdline,
code: CodeDisplay::from(output.status.code()),
stdout: stdout.clone(),
stderr: stderr.clone(),
}
);
Ok(Utf8Output {
stdout: String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
cmdline: get_cmdline(self),
})?,
stderr: String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
cmdline: get_cmdline(self),
})?,
})
Ok(Utf8Output { stdout, stderr })
}
fn ensure_spawn(&mut self) -> Result<std::process::Child> {
@ -135,7 +165,10 @@ mod test {
#[cfg(not(windows))]
assert!(matches!(
Command::new("false").ensure_success(),
Err(Error::ReturnedError { code: Some(1), .. })
Err(Error::ReturnedError {
code: CodeDisplay(_),
..
})
));
}
}


@ -105,7 +105,8 @@ impl Card {
/// Returns true if the card has a due date in terms of days.
fn is_due_in_days(&self) -> bool {
matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
|| matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
|| (self.ctype == CardType::Review && self.is_undue_queue())
}
@ -125,22 +126,20 @@ impl Card {
}
}
/// This uses card.due and card.ivl to infer the elapsed time. If 'set due
/// date' or an add-on has changed the due date, this won't be accurate.
pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
/// If last_review_time isn't stored in the card, this uses card.due and
/// card.ivl to infer the elapsed time, which won't be accurate if
/// 'set due date' or an add-on has changed the due date.
pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
if let Some(last_review_time) = self.last_review_time {
Some(timing.next_day_at.elapsed_days_since(last_review_time) as u32)
} else if !self.is_due_in_days() {
Some(
(timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
/ 86_400,
)
} else {
Some(timing.now.elapsed_secs_since(last_review_time) as u32)
} else if self.is_due_in_days() {
self.due_time(timing).map(|due| {
(due.adding_secs(-86_400 * self.interval as i64)
.elapsed_secs()
/ 86_400) as u32
.elapsed_secs()) as u32
})
} else {
let last_review_time = TimestampSecs(self.original_or_current_due() as i64);
Some(timing.now.elapsed_secs_since(last_review_time) as u32)
}
}
}
@ -543,12 +542,12 @@ impl RowContext {
self.cards[0]
.memory_state
.as_ref()
.zip(self.cards[0].days_since_last_review(&self.timing))
.zip(self.cards[0].seconds_since_last_review(&self.timing))
.zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
.map(|((state, days_elapsed), decay)| {
let r = FSRS::new(None).unwrap().current_retrievability(
.map(|((state, seconds), decay)| {
let r = FSRS::new(None).unwrap().current_retrievability_seconds(
(*state).into(),
days_elapsed,
seconds,
decay,
);
format!("{:.0}%", r * 100.)
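
The switch from `current_retrievability` to `current_retrievability_seconds` feeds elapsed seconds rather than whole days into the forgetting curve. A hedged sketch of the power curve as commonly described for FSRS — the real computation lives in the `fsrs` crate, so the formula and constant below are assumptions, not copied from it:

```rust
/// Assumed form of the FSRS power forgetting curve:
/// R(t) = (1 + k * t / S)^(-decay), with k chosen so that R(S) = 0.9.
fn retrievability(seconds_elapsed: u32, stability_days: f32, decay: f32) -> f32 {
    let t_days = seconds_elapsed as f32 / 86_400.0;
    let factor = 0.9f32.powf(-1.0 / decay) - 1.0;
    (1.0 + factor * t_days / stability_days).powf(-decay)
}

fn main() {
    // One full "stability" of elapsed time should land at roughly 90% recall.
    let r = retrievability(10 * 86_400, 10.0, 0.5);
    println!("{r:.3}"); // ≈ 0.900
}
```

With t = S the curve returns 0.9 by construction, which is why ten days of elapsed time against a ten-day stability prints roughly 0.900.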

View file

@ -24,6 +24,7 @@ use crate::notetype::NotetypeId;
use crate::notetype::NotetypeKind;
use crate::prelude::*;
use crate::progress::ThrottlingProgressHandler;
use crate::storage::card::CardFixStats;
use crate::timestamp::TimestampMillis;
use crate::timestamp::TimestampSecs;
@ -40,6 +41,7 @@ pub struct CheckDatabaseOutput {
notetypes_recovered: usize,
invalid_utf8: usize,
invalid_ids: usize,
card_last_review_time_empty: usize,
}
#[derive(Debug, Clone, Copy, Default)]
@ -69,6 +71,11 @@ impl CheckDatabaseOutput {
if self.card_properties_invalid > 0 {
probs.push(tr.database_check_card_properties(self.card_properties_invalid));
}
if self.card_last_review_time_empty > 0 {
probs.push(
tr.database_check_card_last_review_time_empty(self.card_last_review_time_empty),
);
}
if self.cards_missing_note > 0 {
probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
}
@ -158,14 +165,25 @@ impl Collection {
fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
let timing = self.timing_today()?;
let (new_cnt, other_cnt) = self.storage.fix_card_properties(
let CardFixStats {
new_cards_fixed,
other_cards_fixed,
last_review_time_fixed,
} = self.storage.fix_card_properties(
timing.days_elapsed,
TimestampSecs::now(),
self.usn()?,
self.scheduler_version() == SchedulerVersion::V1,
)?;
out.card_position_too_high = new_cnt;
out.card_properties_invalid += other_cnt;
out.card_position_too_high = new_cards_fixed;
out.card_properties_invalid += other_cards_fixed;
out.card_last_review_time_empty = last_review_time_fixed;
// Trigger one-way sync if last_review_time was updated to avoid conflicts
if last_review_time_fixed > 0 {
self.set_schema_modified()?;
}
Ok(())
}

View file

@ -1,6 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use anki_proto::generic;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use crate::collection::Collection;
use crate::deckconfig::DeckConfSchema11;
@ -9,6 +13,7 @@ use crate::deckconfig::DeckConfigId;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::error::Result;
use crate::scheduler::fsrs::params::ignore_revlogs_before_date_to_ms;
use crate::scheduler::fsrs::simulator::is_included_card;
impl crate::services::DeckConfigService for Collection {
fn add_or_update_deck_config_legacy(
@ -101,68 +106,41 @@ impl crate::services::DeckConfigService for Collection {
&mut self,
input: anki_proto::deck_config::GetRetentionWorkloadRequest,
) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
const LEARN_SPAN: usize = 100_000_000;
const TERMINATION_PROB: f32 = 0.001;
// the default values are from https://github.com/open-spaced-repetition/Anki-button-usage/blob/881009015c2a85ac911021d76d0aacb124849937/analysis.ipynb
const DEFAULT_LEARN_COST: f32 = 19.4698;
const DEFAULT_PASS_COST: f32 = 7.8454;
const DEFAULT_FAIL_COST: f32 = 23.185;
const DEFAULT_INITIAL_PASS_RATE: f32 = 0.7645;
let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
let guard =
self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
let costs = guard.col.storage.get_costs_for_retention()?;
fn smoothing(obs: f32, default: f32, count: u32) -> f32 {
let alpha = count as f32 / (50.0 + count as f32);
obs * alpha + default * (1.0 - alpha)
}
let revlogs = guard
.col
.storage
.get_revlog_entries_for_searched_cards_in_card_order()?;
let cost_success = smoothing(
costs.average_pass_time_ms / 1000.0,
DEFAULT_PASS_COST,
costs.pass_count,
);
let cost_failure = smoothing(
costs.average_fail_time_ms / 1000.0,
DEFAULT_FAIL_COST,
costs.fail_count,
);
let cost_learn = smoothing(
costs.average_learn_time_ms / 1000.0,
DEFAULT_LEARN_COST,
costs.learn_count,
);
let initial_pass_rate = smoothing(
costs.initial_pass_rate,
DEFAULT_INITIAL_PASS_RATE,
costs.pass_count,
);
let config = guard.col.get_optimal_retention_parameters(revlogs)?;
let cards = guard
.col
.storage
.all_searched_cards()?
.into_iter()
.filter(is_included_card)
.filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
.collect::<Vec<fsrs::Card>>();
let before = fsrs::expected_workload(
let costs = (70u32..=99u32)
.into_par_iter()
.map(|dr| {
Ok((
dr,
fsrs::expected_workload_with_existing_cards(
&input.w,
input.before,
LEARN_SPAN,
cost_success,
cost_failure,
cost_learn,
initial_pass_rate,
TERMINATION_PROB,
)?;
let after = fsrs::expected_workload(
&input.w,
input.after,
LEARN_SPAN,
cost_success,
cost_failure,
cost_learn,
initial_pass_rate,
TERMINATION_PROB,
)?;
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse {
factor: after / before,
dr as f32 / 100.,
&config,
&cards,
)?,
))
})
.collect::<Result<HashMap<_, _>>>()?;
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse { costs })
}
}
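
The rewritten handler returns a map of cost per desired-retention percentage (70–99) instead of a single before/after factor, leaving the division to the caller. A simplified, sequential sketch of that sweep; `estimated_cost` is a made-up stand-in for `expected_workload_with_existing_cards`, and the real code runs the loop in parallel with rayon:

```rust
use std::collections::HashMap;

/// Made-up stand-in for the per-retention workload estimate; purely
/// illustrative, not the FSRS computation.
fn estimated_cost(desired_retention: f32) -> f32 {
    1.0 / (1.0 - desired_retention)
}

fn main() {
    // Sweep 70%..=99% like the hunk above, sequentially for clarity.
    let costs: HashMap<u32, f32> = (70u32..=99)
        .map(|dr| (dr, estimated_cost(dr as f32 / 100.0)))
        .collect();

    // The client can then compare any two retention levels directly:
    let factor = costs[&95] / costs[&90];
    println!("moving from 90% to 95% desired retention costs {factor:.2}x as much");
}
```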

View file

@ -212,10 +212,13 @@ impl Collection {
if fsrs_toggled {
self.set_config_bool_inner(BoolKey::Fsrs, req.fsrs)?;
}
let mut deck_desired_retention: HashMap<DeckId, f32> = Default::default();
for deck in self.storage.get_all_decks()? {
if let Ok(normal) = deck.normal() {
let deck_id = deck.id;
if let Some(desired_retention) = normal.desired_retention {
deck_desired_retention.insert(deck_id, desired_retention);
}
// previous order & params
let previous_config_id = DeckConfigId(normal.config_id);
let previous_config = configs_before_update.get(&previous_config_id);
@ -277,10 +280,11 @@ impl Collection {
if req.fsrs {
Some(UpdateMemoryStateRequest {
params: c.fsrs_params().clone(),
desired_retention: c.inner.desired_retention,
preset_desired_retention: c.inner.desired_retention,
max_interval: c.inner.maximum_review_interval,
reschedule: req.fsrs_reschedule,
historical_retention: c.inner.historical_retention,
deck_desired_retention: deck_desired_retention.clone(),
})
} else {
None
@ -409,6 +413,7 @@ fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits {
.new_limit_today
.map(|limit| limit.today == today)
.unwrap_or_default(),
desired_retention: deck.desired_retention,
}
}
@ -417,6 +422,7 @@ fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) {
deck.new_limit = limits.new;
update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
deck.desired_retention = limits.desired_retention;
}
fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {

View file

@ -31,6 +31,7 @@ pub(crate) use name::immediate_parent_name;
pub use name::NativeDeckName;
pub use schema11::DeckSchema11;
use crate::deckconfig::DeckConfig;
use crate::define_newtype;
use crate::error::FilteredDeckError;
use crate::markdown::render_markdown;
@ -89,6 +90,16 @@ impl Deck {
}
}
/// Get the effective desired retention value for a deck.
/// Returns deck-specific desired retention if available, otherwise falls
/// back to config default.
pub fn effective_desired_retention(&self, config: &DeckConfig) -> f32 {
self.normal()
.ok()
.and_then(|d| d.desired_retention)
.unwrap_or(config.inner.desired_retention)
}
// used by tests at the moment
#[allow(dead_code)]

View file

@ -325,6 +325,7 @@ impl From<NormalDeckSchema11> for NormalDeck {
new_limit: deck.new_limit,
review_limit_today: deck.review_limit_today,
new_limit_today: deck.new_limit_today,
desired_retention: None,
}
}
}

View file

@ -84,6 +84,42 @@ impl RevlogEntry {
})
.unwrap()
}
/// Returns true if this entry represents a reset operation.
/// These entries are created when a card is reset using
/// [`Collection::reschedule_cards_as_new`].
/// The 0 value of `ease_factor` differentiates it
/// from an entry created by [`Collection::set_due_date`], which has
/// `RevlogReviewKind::Manual` but a non-zero `ease_factor`.
pub(crate) fn is_reset(&self) -> bool {
self.review_kind == RevlogReviewKind::Manual && self.ease_factor == 0
}
/// Returns true if this entry represents a cramming operation.
/// These entries are created when a card is reviewed in a
/// filtered deck with "Reschedule cards based on my answers
/// in this deck" disabled.
/// See [`crate::scheduler::answering::CardStateUpdater::apply_preview_state`].
/// The 0 value of `ease_factor` distinguishes it from the entry
/// created when a card is reviewed before its due date in a
/// filtered deck with reschedule enabled or using Grade Now.
pub(crate) fn is_cramming(&self) -> bool {
self.review_kind == RevlogReviewKind::Filtered && self.ease_factor == 0
}
pub(crate) fn has_rating(&self) -> bool {
self.button_chosen > 0
}
/// Returns true if the review entry is not manually rescheduled and not
/// cramming. Used to filter out entries that shouldn't be considered
/// for statistics and scheduling.
pub(crate) fn has_rating_and_affects_scheduling(&self) -> bool {
// not rescheduled/set due date/reset
self.has_rating()
// not cramming
&& !self.is_cramming()
}
}
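
The new `is_reset`, `is_cramming` and `has_rating_and_affects_scheduling` helpers centralize checks that were previously repeated inline (compare the graphs and `reviews_for_fsrs` hunks further down). A toy model of the classification, with field names paraphrased from the hunk:

```rust
#[derive(Clone, Copy, PartialEq)]
enum ReviewKind {
    Learning,
    Review,
    Filtered,
    Manual,
}

struct RevlogEntry {
    review_kind: ReviewKind,
    ease_factor: u32,
    button_chosen: u8,
}

impl RevlogEntry {
    fn is_reset(&self) -> bool {
        self.review_kind == ReviewKind::Manual && self.ease_factor == 0
    }
    fn is_cramming(&self) -> bool {
        self.review_kind == ReviewKind::Filtered && self.ease_factor == 0
    }
    fn has_rating(&self) -> bool {
        self.button_chosen > 0
    }
    fn has_rating_and_affects_scheduling(&self) -> bool {
        self.has_rating() && !self.is_cramming()
    }
}

fn main() {
    let entries = [
        RevlogEntry { review_kind: ReviewKind::Review, ease_factor: 2500, button_chosen: 3 },
        RevlogEntry { review_kind: ReviewKind::Manual, ease_factor: 0, button_chosen: 0 },   // reset
        RevlogEntry { review_kind: ReviewKind::Filtered, ease_factor: 0, button_chosen: 3 }, // cram
        RevlogEntry { review_kind: ReviewKind::Learning, ease_factor: 0, button_chosen: 1 },
    ];
    let counted = entries.iter().filter(|e| e.has_rating_and_affects_scheduling()).count();
    println!("entries that affect scheduling: {counted}"); // 2
}
```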
impl Collection {

View file

@ -444,6 +444,8 @@ impl Collection {
.get_deck(card.deck_id)?
.or_not_found(card.deck_id)?;
let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
let desired_retention = deck.effective_desired_retention(&config);
let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
let fsrs_next_states = if fsrs_enabled {
let params = config.fsrs_params();
@ -473,13 +475,13 @@ impl Collection {
};
Some(fsrs.next_states(
card.memory_state.map(Into::into),
config.inner.desired_retention,
desired_retention,
days_elapsed,
)?)
} else {
None
};
let desired_retention = fsrs_enabled.then_some(config.inner.desired_retention);
let desired_retention = fsrs_enabled.then_some(desired_retention);
let fsrs_short_term_with_steps =
self.get_config_bool(BoolKey::FsrsShortTermWithStepsEnabled);
let fsrs_allow_short_term = if fsrs_enabled {
@ -662,6 +664,43 @@ pub(crate) mod test {
col.get_scheduling_states(card_id).unwrap().current
}
// Test that deck-specific desired retention is used when available
#[test]
fn deck_specific_desired_retention() -> Result<()> {
let mut col = Collection::new();
// Enable FSRS
col.set_config_bool(BoolKey::Fsrs, true, false)?;
// Create a deck with specific desired retention
let deck_id = DeckId(1);
let deck = col.get_deck(deck_id)?.unwrap();
let mut deck_clone = (*deck).clone();
deck_clone.normal_mut().unwrap().desired_retention = Some(0.85);
col.update_deck(&mut deck_clone)?;
// Create a card in this deck
let nt = col.get_notetype_by_name("Basic")?.unwrap();
let mut note = nt.new_note();
col.add_note(&mut note, deck_id)?;
// Get the card using search_cards
let cards = col.search_cards(note.id, SortMode::NoOrder)?;
let card = col.storage.get_card(cards[0])?.unwrap();
// Test that the card state updater uses deck-specific desired retention
let updater = col.card_state_updater(card)?;
// Print debug information
println!("FSRS enabled: {}", col.get_config_bool(BoolKey::Fsrs));
println!("Desired retention: {:?}", updater.desired_retention);
// Verify that the desired retention is from the deck, not the config
assert_eq!(updater.desired_retention, Some(0.85));
Ok(())
}
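
The test above exercises the new per-deck desired-retention override. The lookup itself reduces to an `Option` fallback; a standalone sketch with hypothetical field names mirroring the hunk:

```rust
/// Hypothetical stand-ins for the deck and preset config types above.
struct DeckConfig {
    desired_retention: f32,
}
struct Deck {
    desired_retention: Option<f32>,
}

impl Deck {
    /// A deck-level override wins; otherwise fall back to the preset value.
    fn effective_desired_retention(&self, config: &DeckConfig) -> f32 {
        self.desired_retention.unwrap_or(config.desired_retention)
    }
}

fn main() {
    let preset = DeckConfig { desired_retention: 0.90 };
    let with_override = Deck { desired_retention: Some(0.85) };
    let without_override = Deck { desired_retention: None };
    assert_eq!(with_override.effective_desired_retention(&preset), 0.85);
    assert_eq!(without_override.effective_desired_retention(&preset), 0.90);
    println!("fallback works");
}
```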
// make sure the 'current' state for a card matches the
// state we applied to it
#[test]

View file

@ -45,10 +45,11 @@ pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 {
#[derive(Debug)]
pub(crate) struct UpdateMemoryStateRequest {
pub params: Params,
pub desired_retention: f32,
pub preset_desired_retention: f32,
pub historical_retention: f32,
pub max_interval: u32,
pub reschedule: bool,
pub deck_desired_retention: HashMap<DeckId, f32>,
}
pub(crate) struct UpdateMemoryStateEntry {
@ -98,7 +99,6 @@ impl Collection {
historical_retention.unwrap_or(0.9),
ignore_before,
)?;
let desired_retention = req.as_ref().map(|w| w.desired_retention);
let mut progress = self.new_progress_handler::<ComputeMemoryProgress>();
progress.update(false, |s| s.total_cards = items.len() as u32)?;
for (idx, (card_id, item)) in items.into_iter().enumerate() {
@ -106,10 +106,16 @@ impl Collection {
let mut card = self.storage.get_card(card_id)?.or_not_found(card_id)?;
let original = card.clone();
if let Some(req) = &req {
let preset_desired_retention = req.preset_desired_retention;
// Store decay and desired retention in the card so that add-ons, card info,
// stats and browser search/sorts don't need to access the deck config.
// Unlike memory states, the scheduler doesn't use the decay and desired retention stored in the card.
card.desired_retention = desired_retention;
let deck_id = card.original_or_current_deck_id();
let desired_retention = *req
.deck_desired_retention
.get(&deck_id)
.unwrap_or(&preset_desired_retention);
card.desired_retention = Some(desired_retention);
card.decay = decay;
if let Some(item) = item {
card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?;
@ -132,7 +138,7 @@ impl Collection {
let original_interval = card.interval;
let interval = fsrs.next_interval(
Some(state.stability),
desired_retention.unwrap(),
desired_retention,
0,
);
card.interval = rescheduler
@ -205,7 +211,11 @@ impl Collection {
.storage
.get_deck_config(conf_id)?
.or_not_found(conf_id)?;
let desired_retention = config.inner.desired_retention;
// Get deck-specific desired retention if available, otherwise use config
// default
let desired_retention = deck.effective_desired_retention(&config);
let historical_retention = config.inner.historical_retention;
let params = config.fsrs_params();
let decay = get_decay_from_params(params);
@ -295,15 +305,15 @@ pub(crate) fn fsrs_items_for_memory_states(
.collect()
}
struct LastRevlogInfo {
pub(crate) struct LastRevlogInfo {
/// Used to determine the actual elapsed time between the last time the user
/// reviewed the card and now, so that we can determine an accurate period
/// when the card has subsequently been rescheduled to a different day.
last_reviewed_at: Option<TimestampSecs>,
pub(crate) last_reviewed_at: Option<TimestampSecs>,
}
/// Return a map of cards to info about last review/reschedule.
fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
/// Return a map of cards to info about last review.
pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
let mut out = HashMap::new();
revlogs
.iter()
@ -312,8 +322,10 @@ fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogIn
.for_each(|(card_id, group)| {
let mut last_reviewed_at = None;
for e in group.into_iter() {
if e.button_chosen >= 1 {
if e.has_rating_and_affects_scheduling() {
last_reviewed_at = Some(e.id.as_secs());
} else if e.is_reset() {
last_reviewed_at = None;
}
}
out.insert(card_id, LastRevlogInfo { last_reviewed_at });
@ -377,6 +389,7 @@ pub(crate) fn fsrs_item_for_memory_state(
Ok(None)
}
} else {
// no revlogs (new card or caused by ignore_revlogs_before or deleted revlogs)
Ok(None)
}
}
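
`get_last_revlog_info` now treats a reset entry as wiping the review history, while any graded entry that affects scheduling refreshes the timestamp. A minimal model of that fold — the event names are invented; the real code walks grouped `RevlogEntry` values per card:

```rust
#[derive(Clone, Copy)]
enum Event {
    Rated(i64), // graded review at the given timestamp
    Reset,      // "forget"/reschedule-as-new
    Other,      // manual set-due-date, cramming, etc.
}

fn last_reviewed_at(events: &[Event]) -> Option<i64> {
    let mut last = None;
    for e in events {
        match e {
            Event::Rated(at) => last = Some(*at), // a graded review updates the timestamp
            Event::Reset => last = None,          // a reset wipes the history
            Event::Other => {}                    // ignored for scheduling purposes
        }
    }
    last
}

fn main() {
    assert_eq!(last_reviewed_at(&[Event::Rated(100), Event::Other, Event::Rated(200)]), Some(200));
    assert_eq!(last_reviewed_at(&[Event::Rated(100), Event::Reset]), None);
    println!("ok");
}
```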

View file

@ -394,13 +394,13 @@ pub(crate) fn reviews_for_fsrs(
let mut revlogs_complete = false;
// Working backwards from the latest review...
for (index, entry) in entries.iter().enumerate().rev() {
if entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0 {
if entry.is_cramming() {
continue;
}
// For incomplete review histories, initial memory state is based on the first
// user-graded review after the cutoff date with interval >= 1d.
let within_cutoff = entry.id.0 > ignore_revlogs_before.0;
let user_graded = matches!(entry.button_chosen, 1..=4);
let user_graded = entry.has_rating();
let interday = entry.interval >= 1 || entry.interval <= -86400;
if user_graded && within_cutoff && interday {
first_user_grade_idx = Some(index);
@ -409,10 +409,7 @@ pub(crate) fn reviews_for_fsrs(
if user_graded && entry.review_kind == RevlogReviewKind::Learning {
first_of_last_learn_entries = Some(index);
revlogs_complete = true;
} else if matches!(
(entry.review_kind, entry.ease_factor),
(RevlogReviewKind::Manual, 0)
) {
} else if entry.is_reset() {
// Ignore entries prior to a `Reset` if a learning step has come after,
// but consider revlogs complete.
if first_of_last_learn_entries.is_some() {
@ -472,16 +469,7 @@ pub(crate) fn reviews_for_fsrs(
}
// Filter out unwanted entries
entries.retain(|entry| {
!(
// set due date, reset or rescheduled
(entry.review_kind == RevlogReviewKind::Manual || entry.button_chosen == 0)
|| // cram
(entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0)
|| // rescheduled
(entry.review_kind == RevlogReviewKind::Rescheduled)
)
});
entries.retain(|entry| entry.has_rating_and_affects_scheduling());
// Compute delta_t for each entry
let delta_ts = iter::once(0)
@ -560,10 +548,14 @@ pub(crate) mod tests {
}
pub(crate) fn revlog(review_kind: RevlogReviewKind, days_ago: i64) -> RevlogEntry {
let button_chosen = match review_kind {
RevlogReviewKind::Manual | RevlogReviewKind::Rescheduled => 0,
_ => 3,
};
RevlogEntry {
review_kind,
id: days_ago_ms(days_ago).into(),
button_chosen: 3,
button_chosen,
interval: 1,
..Default::default()
}
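
After the `retain()` above, the remaining graded entries are turned into per-review elapsed days. A small sketch of the `iter::once(0).chain(...)` idiom the hunk is building toward, with the field handling simplified to plain day numbers:

```rust
use std::iter;

/// Given review days in ascending order, produce the days elapsed before each
/// review, with 0 for the first one (assumed convention from the hunk above).
fn delta_ts(review_days: &[u32]) -> Vec<u32> {
    iter::once(0)
        .chain(review_days.windows(2).map(|w| w[1] - w[0]))
        .collect()
}

fn main() {
    assert_eq!(delta_ts(&[10, 13, 20]), vec![0, 3, 7]);
    println!("ok");
}
```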

View file

@ -1,20 +1,27 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use std::sync::Arc;
use anki_proto::deck_config::deck_config::config::ReviewCardOrder;
use anki_proto::deck_config::deck_config::config::ReviewCardOrder::*;
use anki_proto::scheduler::SimulateFsrsReviewRequest;
use anki_proto::scheduler::SimulateFsrsReviewResponse;
use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
use fsrs::simulate;
use fsrs::PostSchedulingFn;
use fsrs::ReviewPriorityFn;
use fsrs::SimulatorConfig;
use fsrs::FSRS;
use itertools::Itertools;
use rand::rngs::StdRng;
use rand::Rng;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use crate::card::CardQueue;
use crate::card::CardType;
use crate::card::FsrsMemoryState;
use crate::prelude::*;
use crate::scheduler::states::fuzz::constrained_fuzz_bounds;
use crate::scheduler::states::load_balancer::calculate_easy_days_modifiers;
@ -114,6 +121,12 @@ fn create_review_priority_fn(
}
}
pub(crate) fn is_included_card(c: &Card) -> bool {
c.queue != CardQueue::Suspended
&& c.queue != CardQueue::PreviewRepeat
&& c.ctype != CardType::New
}
impl Collection {
pub fn simulate_request_to_config(
&mut self,
@ -126,30 +139,42 @@ impl Collection {
.get_revlog_entries_for_searched_cards_in_card_order()?;
let mut cards = guard.col.storage.all_searched_cards()?;
drop(guard);
fn is_included_card(c: &Card) -> bool {
c.queue != CardQueue::Suspended
&& c.queue != CardQueue::PreviewRepeat
&& c.queue != CardQueue::New
}
// calculate any missing memory state
for c in &mut cards {
if is_included_card(c) && c.memory_state.is_none() {
let original = c.clone();
let new_state = self.compute_memory_state(c.id)?.state;
c.memory_state = new_state.map(Into::into);
self.update_card_inner(c, original, self.usn()?)?;
let fsrs_data = self.compute_memory_state(c.id)?;
c.memory_state = fsrs_data.state.map(Into::into);
c.desired_retention = Some(fsrs_data.desired_retention);
c.decay = Some(fsrs_data.decay);
self.storage.update_card(c)?;
}
}
let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
let new_cards = cards
.iter()
.filter(|c| c.memory_state.is_none() || c.queue == CardQueue::New)
.filter(|c| c.ctype == CardType::New && c.queue != CardQueue::Suspended)
.count()
+ req.deck_size as usize;
let fsrs = FSRS::new(Some(&req.params))?;
let mut converted_cards = cards
.into_iter()
.filter(is_included_card)
.filter_map(|c| Card::convert(c, days_elapsed))
.filter_map(|c| {
let memory_state = match c.memory_state {
Some(state) => state,
// cards that lack memory states after compute_memory_state have no FSRS items,
// implying a truncated or ignored revlog
None => fsrs
.memory_state_from_sm2(
c.ease_factor(),
c.interval as f32,
req.historical_retention,
)
.ok()?
.into(),
};
Card::convert(c, days_elapsed, memory_state)
})
.collect_vec();
let introduced_today_count = self
.search_cards(&format!("{} introduced:1", &req.search), SortMode::NoOrder)?
@ -214,8 +239,8 @@ impl Collection {
learning_step_transitions: p.learning_step_transitions,
relearning_step_transitions: p.relearning_step_transitions,
state_rating_costs: p.state_rating_costs,
learning_step_count: p.learning_step_count,
relearning_step_count: p.relearning_step_count,
learning_step_count: req.learning_step_count as usize,
relearning_step_count: req.relearning_step_count as usize,
};
Ok((config, converted_cards))
@ -248,20 +273,56 @@ impl Collection {
daily_time_cost: result.cost_per_day,
})
}
pub fn simulate_workload(
&mut self,
req: SimulateFsrsReviewRequest,
) -> Result<SimulateFsrsWorkloadResponse> {
let (config, cards) = self.simulate_request_to_config(&req)?;
let dr_workload = (70u32..=99u32)
.into_par_iter()
.map(|dr| {
let result = simulate(
&config,
&req.params,
dr as f32 / 100.,
None,
Some(cards.clone()),
)?;
Ok((
dr,
(
*result.memorized_cnt_per_day.last().unwrap_or(&0.),
result.cost_per_day.iter().sum::<f32>(),
result.review_cnt_per_day.iter().sum::<usize>() as u32
+ result.learn_cnt_per_day.iter().sum::<usize>() as u32,
),
))
})
.collect::<Result<HashMap<_, _>>>()?;
Ok(SimulateFsrsWorkloadResponse {
memorized: dr_workload.iter().map(|(k, v)| (*k, v.0)).collect(),
cost: dr_workload.iter().map(|(k, v)| (*k, v.1)).collect(),
review_count: dr_workload.iter().map(|(k, v)| (*k, v.2)).collect(),
})
}
}
impl Card {
fn convert(card: Card, days_elapsed: i32) -> Option<fsrs::Card> {
match card.memory_state {
Some(state) => match card.queue {
pub(crate) fn convert(
card: Card,
days_elapsed: i32,
memory_state: FsrsMemoryState,
) -> Option<fsrs::Card> {
match card.queue {
CardQueue::DayLearn | CardQueue::Review => {
let due = card.original_or_current_due();
let relative_due = due - days_elapsed;
let last_date = (relative_due - card.interval as i32).min(0) as f32;
Some(fsrs::Card {
id: card.id.0,
difficulty: state.difficulty,
stability: state.stability,
difficulty: memory_state.difficulty,
stability: memory_state.stability,
last_date,
due: relative_due as f32,
interval: card.interval as f32,
@ -269,21 +330,17 @@ impl Card {
})
}
CardQueue::New => None,
CardQueue::Learn | CardQueue::SchedBuried | CardQueue::UserBuried => {
Some(fsrs::Card {
CardQueue::Learn | CardQueue::SchedBuried | CardQueue::UserBuried => Some(fsrs::Card {
id: card.id.0,
difficulty: state.difficulty,
stability: state.stability,
difficulty: memory_state.difficulty,
stability: memory_state.stability,
last_date: 0.0,
due: 0.0,
interval: card.interval as f32,
lapses: card.lapses,
})
}
}),
CardQueue::PreviewRepeat => None,
CardQueue::Suspended => None,
},
None => None,
}
}
}
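
`simulate_workload` gathers one `(memorized, cost, review_count)` tuple per desired-retention step and then splits it into the three maps of the protobuf response. The reshaping on its own, with made-up numbers standing in for simulation results:

```rust
use std::collections::HashMap;

fn main() {
    // Toy per-retention results: (memorized, cost, review_count); values are invented.
    let dr_workload: HashMap<u32, (f32, f32, u32)> =
        HashMap::from([(80, (900.0, 120.0, 3000)), (90, (950.0, 180.0, 4500))]);

    // Split the tuple map into the three per-field maps, as the response does above.
    let memorized: HashMap<u32, f32> = dr_workload.iter().map(|(k, v)| (*k, v.0)).collect();
    let cost: HashMap<u32, f32> = dr_workload.iter().map(|(k, v)| (*k, v.1)).collect();
    let review_count: HashMap<u32, u32> = dr_workload.iter().map(|(k, v)| (*k, v.2)).collect();

    assert_eq!(memorized[&90], 950.0);
    assert_eq!(cost[&80], 120.0);
    assert_eq!(review_count[&90], 4500);
}
```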

View file

@ -16,6 +16,7 @@ use anki_proto::scheduler::FuzzDeltaResponse;
use anki_proto::scheduler::GetOptimalRetentionParametersResponse;
use anki_proto::scheduler::SimulateFsrsReviewRequest;
use anki_proto::scheduler::SimulateFsrsReviewResponse;
use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
use fsrs::ComputeParametersInput;
use fsrs::FSRSItem;
use fsrs::FSRSReview;
@ -283,6 +284,13 @@ impl crate::services::SchedulerService for Collection {
self.simulate_review(input)
}
fn simulate_fsrs_workload(
&mut self,
input: SimulateFsrsReviewRequest,
) -> Result<SimulateFsrsWorkloadResponse> {
self.simulate_workload(input)
}
fn compute_optimal_retention(
&mut self,
input: SimulateFsrsReviewRequest,

View file

@ -57,10 +57,10 @@ const SECOND: f32 = 1.0;
const MINUTE: f32 = 60.0 * SECOND;
const HOUR: f32 = 60.0 * MINUTE;
const DAY: f32 = 24.0 * HOUR;
const MONTH: f32 = 30.417 * DAY; // 365/12 ≈ 30.417
const YEAR: f32 = 365.0 * DAY;
const MONTH: f32 = YEAR / 12.0;
#[derive(Clone, Copy)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) enum TimespanUnit {
Seconds,
Minutes,
@ -111,6 +111,13 @@ impl Timespan {
}
}
pub fn to_unit(self, unit: TimespanUnit) -> Timespan {
Timespan {
seconds: self.seconds,
unit,
}
}
/// Round seconds and days to integers, otherwise
/// truncates to one decimal place.
pub fn as_rounded_unit(self) -> f32 {

View file

@ -378,9 +378,10 @@ fn card_order_from_sort_column(column: Column, timing: SchedTimingToday) -> Cow<
Column::Stability => "extract_fsrs_variable(c.data, 's') asc".into(),
Column::Difficulty => "extract_fsrs_variable(c.data, 'd') asc".into(),
Column::Retrievability => format!(
"extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}) asc",
"extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}, {}) asc",
timing.days_elapsed,
timing.next_day_at.0
timing.next_day_at.0,
timing.now.0,
)
.into(),
}

View file

@ -418,13 +418,13 @@ impl SqlWriter<'_> {
write!(self.sql, "extract_fsrs_variable(c.data, 'd') {op} {d}").unwrap()
}
PropertyKind::Retrievability(r) => {
let (elap, next_day_at) = {
let (elap, next_day_at, now) = {
let timing = self.col.timing_today()?;
(timing.days_elapsed, timing.next_day_at)
(timing.days_elapsed, timing.next_day_at, timing.now)
};
write!(
self.sql,
"extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}) {op} {r}"
"extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}"
)
.unwrap()
}

View file

@ -30,14 +30,24 @@ impl Collection {
let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
let timing = self.timing_today()?;
let seconds_elapsed = if let Some(last_review_time) = card.last_review_time {
timing.now.elapsed_secs_since(last_review_time) as u32
let last_review_time = if let Some(last_review_time) = card.last_review_time {
last_review_time
} else {
self.storage
let mut new_card = card.clone();
let last_review_time = self
.storage
.time_of_last_review(card.id)?
.map(|ts| timing.now.elapsed_secs_since(ts))
.unwrap_or_default() as u32
.unwrap_or_default();
new_card.last_review_time = Some(last_review_time);
self.storage.update_card(&new_card)?;
last_review_time
};
let seconds_elapsed = timing.now.elapsed_secs_since(last_review_time) as u32;
let fsrs_retrievability = card
.memory_state
.zip(Some(seconds_elapsed))
@ -187,7 +197,7 @@ impl Collection {
}
fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
let normal_answer_count = revlog.iter().filter(|r| r.button_chosen > 0).count();
let normal_answer_count = revlog.iter().filter(|r| r.has_rating()).count();
let total_secs: f32 = revlog
.iter()
.map(|entry| (entry.taken_millis as f32) / 1000.0)

View file

@ -53,10 +53,7 @@ impl GraphsContext {
self.revlog
.iter()
.filter(|review| {
// not rescheduled/set due date/reset
review.button_chosen > 0
// not cramming
&& (review.review_kind != RevlogReviewKind::Filtered || review.ease_factor != 0)
review.has_rating_and_affects_scheduling()
// cards with an interval ≥ 1 day
&& (review.review_kind == RevlogReviewKind::Review
|| review.last_interval <= -86400

View file

@ -30,10 +30,10 @@ impl GraphsContext {
.or_insert((0.0, 0));
entry.1 += 1;
if let Some(state) = card.memory_state {
let elapsed_days = card.days_since_last_review(&timing).unwrap_or_default();
let r = fsrs.current_retrievability(
let elapsed_seconds = card.seconds_since_last_review(&timing).unwrap_or_default();
let r = fsrs.current_retrievability_seconds(
state.into(),
elapsed_days,
elapsed_seconds,
card.decay.unwrap_or(FSRS5_DEFAULT_DECAY),
);

View file

@ -5,17 +5,18 @@ use anki_i18n::I18n;
use crate::prelude::*;
use crate::scheduler::timespan::Timespan;
use crate::scheduler::timespan::TimespanUnit;
pub fn studied_today(cards: u32, secs: f32, tr: &I18n) -> String {
let span = Timespan::from_secs(secs).natural_span();
let amount = span.as_unit();
let unit = span.unit().as_str();
let unit = std::cmp::min(span.unit(), TimespanUnit::Minutes);
let amount = span.to_unit(unit).as_unit();
let secs_per_card = if cards > 0 {
secs / (cards as f32)
} else {
0.0
};
tr.statistics_studied_today(unit, secs_per_card, amount, cards)
tr.statistics_studied_today(unit.as_str(), secs_per_card, amount, cards)
.into()
}
@ -41,5 +42,9 @@ mod test {
&studied_today(3, 13.0, &tr).replace('\n', " "),
"Studied 3 cards in 13 seconds today (4.33s/card)"
);
assert_eq!(
&studied_today(300, 5400.0, &tr).replace('\n', " "),
"Studied 300 cards in 90 minutes today (18s/card)"
);
}
}
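
Deriving `Ord` on the unit enum is what lets `studied_today` clamp the display unit with `std::cmp::min`, so long sessions report minutes instead of hours. A condensed sketch of that clamping; the thresholds and names below are simplified stand-ins:

```rust
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Unit {
    Seconds,
    Minutes,
    Hours,
}

fn natural_unit(secs: f32) -> Unit {
    if secs < 60.0 {
        Unit::Seconds
    } else if secs < 3600.0 {
        Unit::Minutes
    } else {
        Unit::Hours
    }
}

fn main() {
    // 5400 s would naturally render as 1.5 hours; clamping keeps "90 minutes",
    // matching the new test case above.
    let secs = 5400.0;
    let unit = std::cmp::min(natural_unit(secs), Unit::Minutes);
    assert_eq!(unit, Unit::Minutes);
    println!("studied for {} minutes", secs / 60.0);
}
```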

View file

@ -14,6 +14,8 @@ pub(crate) fn order_and_limit_for_search(
) -> String {
let temp_string;
let today = timing.days_elapsed;
let next_day_at = timing.next_day_at.0;
let now = timing.now.0;
let order = match term.order() {
FilteredSearchOrder::OldestReviewedFirst => "(select max(id) from revlog where cid=c.id)",
FilteredSearchOrder::Random => "random()",
@ -29,15 +31,13 @@ pub(crate) fn order_and_limit_for_search(
&temp_string
}
FilteredSearchOrder::RetrievabilityAscending => {
let next_day_at = timing.next_day_at.0;
temp_string =
build_retrievability_query(fsrs, today, next_day_at, SqlSortOrder::Ascending);
build_retrievability_query(fsrs, today, next_day_at, now, SqlSortOrder::Ascending);
&temp_string
}
FilteredSearchOrder::RetrievabilityDescending => {
let next_day_at = timing.next_day_at.0;
temp_string =
build_retrievability_query(fsrs, today, next_day_at, SqlSortOrder::Descending);
build_retrievability_query(fsrs, today, next_day_at, now, SqlSortOrder::Descending);
&temp_string
}
};
@ -49,11 +49,12 @@ fn build_retrievability_query(
fsrs: bool,
today: u32,
next_day_at: i64,
now: i64,
order: SqlSortOrder,
) -> String {
if fsrs {
format!(
"extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}) {order}"
"extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}, {now}) {order}"
)
} else {
format!(

View file

@ -1,85 +0,0 @@
WITH searched_revlogs AS (
SELECT *,
RANK() OVER (
PARTITION BY cid
ORDER BY id ASC
) AS rank_num
FROM revlog
WHERE ease > 0
AND cid IN search_cids
ORDER BY id DESC -- Use the last 10_000 reviews
LIMIT 10000
), average_pass AS (
SELECT AVG(time)
FROM searched_revlogs
WHERE ease > 1
AND type = 1
),
lapse_count AS (
SELECT COUNT(time) AS lapse_count
FROM searched_revlogs
WHERE ease = 1
AND type = 1
),
fail_sum AS (
SELECT SUM(time) AS total_fail_time
FROM searched_revlogs
WHERE (
ease = 1
AND type = 1
)
OR type = 2
),
-- (sum(Relearning) + sum(Lapses)) / count(Lapses)
average_fail AS (
SELECT total_fail_time * 1.0 / NULLIF(lapse_count, 0) AS avg_fail_time
FROM fail_sum,
lapse_count
),
-- Can lead to cards with partial learn histories skewing the time
summed_learns AS (
SELECT cid,
SUM(time) AS total_time
FROM searched_revlogs
WHERE searched_revlogs.type = 0
GROUP BY cid
),
average_learn AS (
SELECT AVG(total_time) AS avg_learn_time
FROM summed_learns
),
initial_pass_rate AS (
SELECT AVG(
CASE
WHEN ease > 1 THEN 1.0
ELSE 0.0
END
) AS initial_pass_rate
FROM searched_revlogs
WHERE rank_num = 1
),
pass_cnt AS (
SELECT COUNT(*) AS cnt
FROM searched_revlogs
WHERE ease > 1
AND type = 1
),
fail_cnt AS (
SELECT COUNT(*) AS cnt
FROM searched_revlogs
WHERE ease = 1
AND type = 1
),
learn_cnt AS (
SELECT COUNT(*) AS cnt
FROM searched_revlogs
WHERE type = 0
)
SELECT *
FROM average_pass,
average_fail,
average_learn,
initial_pass_rate,
pass_cnt,
fail_cnt,
learn_cnt;

View file

@ -33,6 +33,7 @@ use crate::decks::DeckKind;
use crate::error::Result;
use crate::notes::NoteId;
use crate::scheduler::congrats::CongratsInfo;
use crate::scheduler::fsrs::memory_state::get_last_revlog_info;
use crate::scheduler::queue::BuryMode;
use crate::scheduler::queue::DueCard;
use crate::scheduler::queue::DueCardKind;
@ -42,15 +43,11 @@ use crate::timestamp::TimestampMillis;
use crate::timestamp::TimestampSecs;
use crate::types::Usn;
#[derive(Debug, Clone, Default)]
pub struct RetentionCosts {
pub average_pass_time_ms: f32,
pub average_fail_time_ms: f32,
pub average_learn_time_ms: f32,
pub initial_pass_rate: f32,
pub pass_count: u32,
pub fail_count: u32,
pub learn_count: u32,
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct CardFixStats {
pub new_cards_fixed: usize,
pub other_cards_fixed: usize,
pub last_review_time_fixed: usize,
}
impl FromSql for CardType {
@ -376,7 +373,7 @@ impl super::SqliteStorage {
mtime: TimestampSecs,
usn: Usn,
v1_sched: bool,
) -> Result<(usize, usize)> {
) -> Result<CardFixStats> {
let new_cnt = self
.db
.prepare(include_str!("fix_due_new.sql"))?
@ -401,7 +398,24 @@ impl super::SqliteStorage {
.db
.prepare(include_str!("fix_ordinal.sql"))?
.execute(params![mtime, usn])?;
Ok((new_cnt, other_cnt))
let mut last_review_time_cnt = 0;
let revlog = self.get_all_revlog_entries_in_card_order()?;
let last_revlog_info = get_last_revlog_info(&revlog);
for (card_id, last_revlog_info) in last_revlog_info {
let card = self.get_card(card_id)?;
if let Some(mut card) = card {
if card.ctype != CardType::New && card.last_review_time.is_none() {
card.last_review_time = last_revlog_info.last_reviewed_at;
self.update_card(&card)?;
last_review_time_cnt += 1;
}
}
}
Ok(CardFixStats {
new_cards_fixed: new_cnt,
other_cards_fixed: other_cnt,
last_review_time_fixed: last_review_time_cnt,
})
}
pub(crate) fn delete_orphaned_cards(&self) -> Result<usize> {
@ -759,24 +773,6 @@ impl super::SqliteStorage {
.get(0)?)
}
pub(crate) fn get_costs_for_retention(&self) -> Result<RetentionCosts> {
let mut statement = self
.db
.prepare(include_str!("get_costs_for_retention.sql"))?;
let mut query = statement.query(params![])?;
let row = query.next()?.unwrap();
Ok(RetentionCosts {
average_pass_time_ms: row.get(0).unwrap_or(7000.),
average_fail_time_ms: row.get(1).unwrap_or(23_000.),
average_learn_time_ms: row.get(2).unwrap_or(30_000.),
initial_pass_rate: row.get(3).unwrap_or(0.5),
pass_count: row.get(4).unwrap_or(0),
fail_count: row.get(5).unwrap_or(0),
learn_count: row.get(6).unwrap_or(0),
})
}
#[cfg(test)]
pub(crate) fn get_all_cards(&self) -> Vec<Card> {
self.db
@ -837,8 +833,9 @@ impl fmt::Display for ReviewOrderSubclause {
ReviewOrderSubclause::RetrievabilityFsrs { timing, order } => {
let today = timing.days_elapsed;
let next_day_at = timing.next_day_at.0;
let now = timing.now.0;
temp_string =
format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}) {order}");
format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}, {now}) {order}");
&temp_string
}
ReviewOrderSubclause::Added => "nid asc, ord asc",

View file

@ -310,14 +310,14 @@ fn add_extract_fsrs_variable(db: &Connection) -> rusqlite::Result<()> {
}
/// eg. extract_fsrs_retrievability(card.data, card.due, card.ivl,
/// timing.days_elapsed, timing.next_day_at) -> float | null
/// timing.days_elapsed, timing.next_day_at, timing.now) -> float | null
fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
db.create_scalar_function(
"extract_fsrs_retrievability",
5,
6,
FunctionFlags::SQLITE_DETERMINISTIC,
move |ctx| {
assert_eq!(ctx.len(), 5, "called with unexpected number of arguments");
assert_eq!(ctx.len(), 6, "called with unexpected number of arguments");
let Ok(card_data) = ctx.get_raw(0).as_str() else {
return Ok(None);
};
@ -328,18 +328,18 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
let Ok(due) = ctx.get_raw(1).as_i64() else {
return Ok(None);
};
let days_elapsed = if let Some(last_review_time) = card_data.last_review_time {
// Use last_review_time to calculate days_elapsed
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
let Ok(now) = ctx.get_raw(5).as_i64() else {
return Ok(None);
};
(next_day_at as u32).saturating_sub(last_review_time.0 as u32) / 86_400
let seconds_elapsed = if let Some(last_review_time) = card_data.last_review_time {
now.saturating_sub(last_review_time.0) as u32
} else if due > 365_000 {
// (re)learning card in seconds
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
return Ok(None);
};
(next_day_at as u32).saturating_sub(due as u32) / 86_400
let last_review_time = due.saturating_sub(ivl);
now.saturating_sub(last_review_time) as u32
} else {
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
return Ok(None);
@ -348,29 +348,32 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
return Ok(None);
};
let review_day = due.saturating_sub(ivl);
(days_elapsed as u32).saturating_sub(review_day as u32)
days_elapsed.saturating_sub(review_day) as u32 * 86_400
};
let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
Ok(card_data.memory_state().map(|state| {
FSRS::new(None)
.unwrap()
.current_retrievability(state.into(), days_elapsed, decay)
}))
let retrievability = card_data.memory_state().map(|state| {
FSRS::new(None).unwrap().current_retrievability_seconds(
state.into(),
seconds_elapsed,
decay,
)
});
Ok(retrievability)
},
)
}
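
The SQL function above now derives elapsed seconds instead of days: it prefers the stored `last_review_time`, then falls back to timestamp arithmetic for intraday cards (`due > 365_000`), then to day arithmetic scaled to seconds. The branching, extracted into plain Rust — the argument handling and the 365_000 convention come from the hunk, everything else is simplified:

```rust
/// Sketch of the elapsed-seconds logic above. `due > 365_000` means the due
/// field is a unix timestamp (intraday (re)learning card); otherwise it is a
/// day number relative to collection creation.
fn seconds_elapsed(
    last_review_time: Option<i64>,
    due: i64,
    ivl: i64,
    days_elapsed: i64,
    now: i64,
) -> u32 {
    if let Some(t) = last_review_time {
        now.saturating_sub(t) as u32
    } else if due > 365_000 {
        let last_review = due.saturating_sub(ivl);
        now.saturating_sub(last_review) as u32
    } else {
        let review_day = due.saturating_sub(ivl);
        days_elapsed.saturating_sub(review_day) as u32 * 86_400
    }
}

fn main() {
    // A review card due on day 110 with a 10-day interval, with today being day 115:
    assert_eq!(seconds_elapsed(None, 110, 10, 115, 1_700_000_000), 15 * 86_400);
}
```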
/// eg. extract_fsrs_relative_retrievability(card.data, card.due,
/// timing.days_elapsed, card.ivl, timing.next_day_at) -> float | null. The
/// higher the number, the higher the card's retrievability relative to the
/// configured desired retention.
/// timing.days_elapsed, card.ivl, timing.next_day_at, timing.now) -> float |
/// null. The higher the number, the higher the card's retrievability relative
/// to the configured desired retention.
fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result<()> {
db.create_scalar_function(
"extract_fsrs_relative_retrievability",
5,
6,
FunctionFlags::SQLITE_DETERMINISTIC,
move |ctx| {
assert_eq!(ctx.len(), 5, "called with unexpected number of arguments");
assert_eq!(ctx.len(), 6, "called with unexpected number of arguments");
let Ok(due) = ctx.get_raw(1).as_i64() else {
return Ok(None);
@ -381,6 +384,9 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
return Ok(None);
};
let Ok(now) = ctx.get_raw(5).as_i64() else {
return Ok(None);
};
let days_elapsed = if due > 365_000 {
// (re)learning
(next_day_at as u32).saturating_sub(due as u32) / 86_400
@ -402,17 +408,30 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
desired_retrievability = desired_retrievability.max(0.0001);
let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
let days_elapsed = if let Some(last_review_time) =
card_data.last_review_time
{
TimestampSecs(next_day_at).elapsed_days_since(last_review_time) as u32
let seconds_elapsed =
if let Some(last_review_time) = card_data.last_review_time {
now.saturating_sub(last_review_time.0) as u32
} else if due > 365_000 {
// (re)learning card in seconds
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
return Ok(None);
};
let last_review_time = due.saturating_sub(ivl);
now.saturating_sub(last_review_time) as u32
} else {
days_elapsed
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
return Ok(None);
};
let Ok(days_elapsed) = ctx.get_raw(3).as_i64() else {
return Ok(None);
};
let review_day = due.saturating_sub(ivl);
days_elapsed.saturating_sub(review_day) as u32 * 86_400
};
let current_retrievability = FSRS::new(None)
.unwrap()
.current_retrievability(state.into(), days_elapsed, decay)
.current_retrievability_seconds(state.into(), seconds_elapsed, decay)
.max(0.0001);
return Ok(Some(

View file

@ -12,14 +12,20 @@ impl Collection {
.map(component_to_regex)
.collect::<Result<_, _>>()?;
let mut tags = vec![];
let mut priority = vec![];
self.storage.get_tags_by_predicate(|tag| {
if tags.len() <= limit && filters_match(&filters, tag) {
tags.push(tag.to_string());
if priority.len() + tags.len() <= limit {
match filters_match(&filters, tag) {
Some(true) => priority.push(tag.to_string()),
Some(_) => tags.push(tag.to_string()),
_ => {}
}
}
// we only need the tag name
false
})?;
Ok(tags)
priority.append(&mut tags);
Ok(priority)
}
}
@ -27,20 +33,26 @@ fn component_to_regex(component: &str) -> Result<Regex> {
Regex::new(&format!("(?i){}", regex::escape(component))).map_err(Into::into)
}
fn filters_match(filters: &[Regex], tag: &str) -> bool {
/// Returns None if the tag wasn't a match; otherwise, returns whether it was a
/// consecutive prefix match.
fn filters_match(filters: &[Regex], tag: &str) -> Option<bool> {
let mut remaining_tag_components = tag.split("::");
let mut is_prefix = true;
'outer: for filter in filters {
loop {
if let Some(component) = remaining_tag_components.next() {
if filter.is_match(component) {
if let Some(m) = filter.find(component) {
is_prefix &= m.start() == 0;
continue 'outer;
} else {
is_prefix = false;
}
} else {
return false;
return None;
}
}
}
true
Some(is_prefix)
}
#[cfg(test)]
@ -50,28 +62,32 @@ mod test {
#[test]
fn matching() -> Result<()> {
let filters = &[component_to_regex("b")?];
assert!(filters_match(filters, "ABC"));
assert!(filters_match(filters, "ABC::def"));
assert!(filters_match(filters, "def::abc"));
assert!(!filters_match(filters, "def"));
assert!(filters_match(filters, "ABC").is_some());
assert!(filters_match(filters, "ABC::def").is_some());
assert!(filters_match(filters, "def::abc").is_some());
assert!(filters_match(filters, "def").is_none());
let filters = &[component_to_regex("b")?, component_to_regex("E")?];
assert!(!filters_match(filters, "ABC"));
assert!(filters_match(filters, "ABC::def"));
assert!(!filters_match(filters, "def::abc"));
assert!(!filters_match(filters, "def"));
assert!(filters_match(filters, "ABC").is_none());
assert!(filters_match(filters, "ABC::def").is_some());
assert!(filters_match(filters, "def::abc").is_none());
assert!(filters_match(filters, "def").is_none());
let filters = &[
component_to_regex("a")?,
component_to_regex("c")?,
component_to_regex("e")?,
];
assert!(!filters_match(filters, "ace"));
assert!(!filters_match(filters, "a::c"));
assert!(!filters_match(filters, "c::e"));
assert!(filters_match(filters, "a::c::e"));
assert!(filters_match(filters, "a::b::c::d::e"));
assert!(filters_match(filters, "1::a::b::c::d::e::f"));
assert!(filters_match(filters, "ace").is_none());
assert!(filters_match(filters, "a::c").is_none());
assert!(filters_match(filters, "c::e").is_none());
assert!(filters_match(filters, "a::c::e").is_some());
assert!(filters_match(filters, "a::b::c::d::e").is_some());
assert!(filters_match(filters, "1::a::b::c::d::e::f").is_some());
assert_eq!(filters_match(filters, "a1::c2::e3"), Some(true));
assert_eq!(filters_match(filters, "a1::c2::?::e4"), Some(false));
assert_eq!(filters_match(filters, "a1::c2::3e"), Some(false));
Ok(())
}
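
`filters_match` now returns `Option<bool>` so prefix matches can be ranked above plain substring matches in the completion list. A regex-free sketch of the same three-way result, with case folding simplified to lowercase substring search:

```rust
/// None = no match; Some(true) = every filter matched at the start of
/// consecutive components; Some(false) = matched, but not as prefixes.
fn filters_match(filters: &[&str], tag: &str) -> Option<bool> {
    let mut components = tag.split("::");
    let mut is_prefix = true;
    'outer: for filter in filters {
        let needle = filter.to_lowercase();
        loop {
            let component = components.next()?; // ran out of components: no match
            if let Some(pos) = component.to_lowercase().find(needle.as_str()) {
                is_prefix &= pos == 0;
                continue 'outer;
            } else {
                is_prefix = false; // a skipped component breaks the prefix chain
            }
        }
    }
    Some(is_prefix)
}

fn main() {
    assert_eq!(filters_match(&["a", "c"], "a1::c2"), Some(true));
    assert_eq!(filters_match(&["a", "c"], "x1::a2::c3"), Some(false));
    assert_eq!(filters_match(&["a", "c"], "a1"), None);
    println!("ok");
}
```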

View file

@ -93,6 +93,10 @@ impl TimestampMillis {
pub fn adding_secs(self, secs: i64) -> Self {
Self(self.0 + secs * 1000)
}
pub fn elapsed_millis(self) -> u64 {
(Self::now().0 - self.0).max(0) as u64
}
}
fn elapsed() -> time::Duration {

View file

@ -12,5 +12,6 @@ anki_io.workspace = true
anki_process.workspace = true
anyhow.workspace = true
camino.workspace = true
serde_json.workspace = true
walkdir.workspace = true
which.workspace = true

View file

@ -2,6 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::cell::LazyCell;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::env;
use std::fs;
@ -148,7 +149,7 @@ impl LintContext {
if last_author == "49699333+dependabot[bot]@users.noreply.github.com" {
println!("Dependabot whitelisted.");
return Ok(());
std::process::exit(0);
} else if all_contributors.contains(last_author.as_str()) {
return Ok(());
}
@ -267,5 +268,16 @@ fn generate_licences() -> Result<String> {
"--manifest-path",
"rslib/Cargo.toml",
])?;
Ok(output.stdout)
let licenses: Vec<BTreeMap<String, serde_json::Value>> = serde_json::from_str(&output.stdout)?;
let filtered: Vec<BTreeMap<String, serde_json::Value>> = licenses
.into_iter()
.map(|mut entry| {
entry.remove("version");
entry
})
.collect();
Ok(serde_json::to_string_pretty(&filtered)?)
}
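
`generate_licences` now parses the JSON emitted by the license tool and drops each entry's `version` field before pretty-printing it, presumably so routine dependency bumps don't churn the generated file. The transformation in isolation, with an invented input string:

```rust
use std::collections::BTreeMap;

fn main() -> Result<(), serde_json::Error> {
    // Hypothetical cargo-license style output containing a version field to drop.
    let raw = r#"[{"name":"serde","version":"1.0.0","license":"MIT OR Apache-2.0"}]"#;

    let mut entries: Vec<BTreeMap<String, serde_json::Value>> = serde_json::from_str(raw)?;
    for entry in &mut entries {
        // Removing the version keeps the output stable across dependency bumps.
        entry.remove("version");
    }
    println!("{}", serde_json::to_string_pretty(&entries)?);
    Ok(())
}
```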

View file

@ -4,7 +4,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import * as tr from "@generated/ftl";
import { isApplePlatform } from "@tslib/platform";
import { chromiumVersion, isApplePlatform } from "@tslib/platform";
import { getPlatformString } from "@tslib/shortcuts";
import { createEventDispatcher } from "svelte";
import { get } from "svelte/store";
@ -22,9 +22,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
const { focusedInput, fields } = noteEditorContext.get();
// Workaround for Cmd+Option+Shift+C not working on macOS. The keyup approach works
// on Linux as well, but fails on Windows.
const event = isApplePlatform() ? "keyup" : "keydown";
// Workaround for Cmd+Option+Shift+C not working on macOS on older Chromium
// versions.
const chromiumVer = chromiumVersion();
const event =
isApplePlatform() && chromiumVer != null && chromiumVer <= 112
? "keyup"
: "keydown";
const clozePattern = /\{\{c(\d+)::/gu;
function getCurrentHighestCloze(increment: boolean): number {

View file

@ -12,7 +12,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export { className as class };
export let title: string;
export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
</script>
<div
@ -25,22 +24,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
style:--container-margin="0"
>
<div class="position-relative">
{#if onTitleClick}
<span
on:click={onTitleClick}
on:keydown={onTitleClick}
role="button"
tabindex="0"
>
<h1>
{title}
</h1>
</span>
{:else}
<h1>
{title}
</h1>
{/if}
<div class="help-badge position-absolute" class:rtl>
<slot name="tooltip" />
</div>

View file

@ -24,6 +24,7 @@ export const HelpPage = {
displayOrder: "https://docs.ankiweb.net/deck-options.html#display-order",
maximumReviewsday: "https://docs.ankiweb.net/deck-options.html#maximum-reviewsday",
newCardsday: "https://docs.ankiweb.net/deck-options.html#new-cardsday",
limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top",
dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits",
audio: "https://docs.ankiweb.net/deck-options.html#audio",
fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs",

View file

@ -13,3 +13,20 @@ export function isApplePlatform(): boolean {
export function isDesktop(): boolean {
return !(/iphone|ipad|ipod|android/i.test(window.navigator.userAgent));
}
export function chromiumVersion(): number | null {
const userAgent = window.navigator.userAgent;
// Check if it's a Chromium-based browser (Chrome, Edge, Opera, etc.).
// Safari's user agent contains "Safari" but not "Chrome", so it is excluded below.
if (userAgent.includes("Safari") && !userAgent.includes("Chrome")) {
return null; // Safari
}
const chromeMatch = userAgent.match(/Chrome\/(\d+)/);
if (chromeMatch) {
return parseInt(chromeMatch[1], 10);
}
return null; // Not a Chromium-based browser
}

View file

@ -89,7 +89,7 @@ export function naturalWholeUnit(secs: number): TimespanUnit {
}
export function studiedToday(cards: number, secs: number): string {
const unit = naturalUnit(secs);
const unit = Math.min(naturalUnit(secs), TimespanUnit.Minutes);
const amount = unitAmount(unit, secs);
const name = unitName(unit);

View file

@ -3,9 +3,9 @@ Copyright: Ankitects Pty Ltd and contributors
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import { page } from "$app/stores";
import { page } from "$app/state";
$: message = $page.error!.message;
$: message = page.error!.message;
</script>
{message}

View file

@ -3,7 +3,7 @@ Copyright: Ankitects Pty Ltd and contributors
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import { page } from "$app/stores";
import { page } from "$app/state";
import CardInfo from "../CardInfo.svelte";
import type { PageData } from "./$types";
@ -11,7 +11,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let data: PageData;
const showRevlog = $page.url.searchParams.get("revlog") !== "0";
const showRevlog = page.url.searchParams.get("revlog") !== "0";
globalThis.anki ||= {};
globalThis.anki.updateCard = async (card_id: string): Promise<void> => {

View file

@ -3,7 +3,7 @@ Copyright: Ankitects Pty Ltd and contributors
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import { page } from "$app/stores";
import { page } from "$app/state";
import CardInfo from "../../CardInfo.svelte";
import type { PageData } from "./$types";
@ -11,8 +11,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let data: PageData;
const showRevlog = $page.url.searchParams.get("revlog") !== "0";
const showCurve = $page.url.searchParams.get("curve") !== "0";
const showRevlog = page.url.searchParams.get("revlog") !== "0";
const showCurve = page.url.searchParams.get("curve") !== "0";
globalThis.anki ||= {};
globalThis.anki.updateCardInfos = async (card_id: string): Promise<void> => {

View file

@ -140,7 +140,7 @@
applyAllParentLimits: {
title: tr.deckConfigApplyAllParentLimits(),
help: applyAllParentLimitsHelp,
url: HelpPage.DeckOptions.newCardsday,
url: HelpPage.DeckOptions.limitsFromTop,
global: true,
},
};

View file

@ -85,6 +85,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
.easy-days-settings input[type="range"] {
width: 100%;
cursor: pointer;
}
.day {

View file

@ -21,7 +21,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import SwitchRow from "$lib/components/SwitchRow.svelte";
import GlobalLabel from "./GlobalLabel.svelte";
import { commitEditing, fsrsParams, type DeckOptionsState } from "./lib";
import { commitEditing, fsrsParams, type DeckOptionsState, ValueTab } from "./lib";
import SpinBoxFloatRow from "./SpinBoxFloatRow.svelte";
import Warning from "./Warning.svelte";
import ParamsInputRow from "./ParamsInputRow.svelte";
@ -29,9 +29,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import SimulatorModal from "./SimulatorModal.svelte";
import {
GetRetentionWorkloadRequest,
type GetRetentionWorkloadResponse,
UpdateDeckConfigsMode,
} from "@generated/anki/deck_config_pb";
import type Modal from "bootstrap/js/dist/modal";
import TabbedValue from "./TabbedValue.svelte";
import Item from "$lib/components/Item.svelte";
import DynamicallySlottable from "$lib/components/DynamicallySlottable.svelte";
export let state: DeckOptionsState;
export let openHelpModal: (String) => void;
@ -42,13 +46,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
const defaults = state.defaults;
const fsrsReschedule = state.fsrsReschedule;
const daysSinceLastOptimization = state.daysSinceLastOptimization;
const limits = state.deckLimits;
$: lastOptimizationWarning =
$daysSinceLastOptimization > 30 ? tr.deckConfigTimeToOptimize() : "";
let desiredRetentionFocused = false;
let desiredRetentionEverFocused = false;
let optimized = false;
const startingDesiredRetention = $config.desiredRetention.toFixed(2);
$: if (desiredRetentionFocused) {
desiredRetentionEverFocused = true;
}
@ -63,28 +67,41 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
$: computing = computingParams || checkingParams;
$: defaultparamSearch = `preset:"${state.getCurrentNameForSearch()}" -is:suspended`;
$: roundedRetention = Number($config.desiredRetention.toFixed(2));
$: roundedRetention = Number(effectiveDesiredRetention.toFixed(2));
$: desiredRetentionWarning = getRetentionLongShortWarning(roundedRetention);
let timeoutId: ReturnType<typeof setTimeout> | undefined = undefined;
const WORKLOAD_UPDATE_DELAY_MS = 100;
let desiredRetentionChangeInfo = "";
$: {
clearTimeout(timeoutId);
if (showDesiredRetentionTooltip) {
timeoutId = setTimeout(() => {
$: if (showDesiredRetentionTooltip) {
getRetentionChangeInfo(roundedRetention, fsrsParams($config));
}, WORKLOAD_UPDATE_DELAY_MS);
} else {
desiredRetentionChangeInfo = "";
}
}
$: retentionWarningClass = getRetentionWarningClass(roundedRetention);
$: newCardsIgnoreReviewLimit = state.newCardsIgnoreReviewLimit;
// Create tabs for desired retention
const desiredRetentionTabs: ValueTab[] = [
new ValueTab(
tr.deckConfigSharedPreset(),
$config.desiredRetention,
(value) => ($config.desiredRetention = value!),
$config.desiredRetention,
null,
),
new ValueTab(
tr.deckConfigDeckOnly(),
$limits.desiredRetention ?? null,
(value) => ($limits.desiredRetention = value ?? undefined),
null,
null,
),
];
// Get the effective desired retention value (deck-specific if set, otherwise config default)
let effectiveDesiredRetention =
$limits.desiredRetention ?? $config.desiredRetention;
const startingDesiredRetention = effectiveDesiredRetention.toFixed(2);
$: simulateFsrsRequest = new SimulateFsrsReviewRequest({
params: fsrsParams($config),
desiredRetention: $config.desiredRetention,
@ -95,6 +112,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
newCardsIgnoreReviewLimit: $newCardsIgnoreReviewLimit,
easyDaysPercentages: $config.easyDaysPercentages,
reviewOrder: $config.reviewOrder,
historicalRetention: $config.historicalRetention,
learningStepCount: $config.learnSteps.length,
relearningStepCount: $config.relearnSteps.length,
});
const DESIRED_RETENTION_LOW_THRESHOLD = 0.8;
@ -110,21 +130,37 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
}
let retentionWorkloadInfo: undefined | Promise<GetRetentionWorkloadResponse> =
undefined;
let lastParams = [...fsrsParams($config)];
async function getRetentionChangeInfo(retention: number, params: number[]) {
if (+startingDesiredRetention == roundedRetention) {
desiredRetentionChangeInfo = tr.deckConfigWorkloadFactorUnchanged();
return;
}
if (
// If the cache is empty and a request has not yet been made to fill it
!retentionWorkloadInfo ||
// If the parameters have been changed
lastParams.toString() !== params.toString()
) {
const request = new GetRetentionWorkloadRequest({
w: params,
search: defaultparamSearch,
before: +startingDesiredRetention,
after: retention,
});
const resp = await getRetentionWorkload(request);
lastParams = [...params];
retentionWorkloadInfo = getRetentionWorkload(request);
}
const previous = +startingDesiredRetention * 100;
const after = retention * 100;
const resp = await retentionWorkloadInfo;
const factor = resp.costs[after] / resp.costs[previous];
desiredRetentionChangeInfo = tr.deckConfigWorkloadFactorChange({
factor: resp.factor.toFixed(2),
previousDr: (+startingDesiredRetention * 100).toString(),
factor: factor.toFixed(2),
previousDr: previous.toString(),
});
}
@ -184,29 +220,34 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
healthCheck: $healthCheck,
});
const already_optimal =
const alreadyOptimal =
(params.length &&
params.every(
(n, i) => n.toFixed(4) === resp.params[i].toFixed(4),
)) ||
resp.params.length === 0;
let healthCheckMessage = "";
if (resp.healthCheckPassed !== undefined) {
if (resp.healthCheckPassed) {
setTimeout(() => alert(tr.deckConfigFsrsGoodFit()), 200);
} else {
setTimeout(
() => alert(tr.deckConfigFsrsBadFitWarning()),
200,
);
healthCheckMessage = resp.healthCheckPassed
? tr.deckConfigFsrsGoodFit()
: tr.deckConfigFsrsBadFitWarning();
}
} else if (already_optimal) {
const msg = resp.fsrsItems
let alreadyOptimalMessage = "";
if (alreadyOptimal) {
alreadyOptimalMessage = resp.fsrsItems
? tr.deckConfigFsrsParamsOptimal()
: tr.deckConfigFsrsParamsNoReviews();
setTimeout(() => alert(msg), 200);
}
if (!already_optimal) {
const message = [alreadyOptimalMessage, healthCheckMessage]
.filter((a) => a)
.join("\n\n");
if (message) {
setTimeout(() => alert(message), 200);
}
if (!alreadyOptimal) {
$config.fsrsParams6 = resp.params;
setTimeout(() => {
optimized = true;
@ -298,20 +339,40 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
let simulatorModal: Modal;
let workloadModal: Modal;
</script>
<SpinBoxFloatRow
bind:value={$config.desiredRetention}
<DynamicallySlottable slotHost={Item} api={{}}>
<Item>
<SpinBoxFloatRow
bind:value={effectiveDesiredRetention}
defaultValue={defaults.desiredRetention}
min={0.7}
max={0.99}
percentage={true}
bind:focused={desiredRetentionFocused}
>
>
<TabbedValue
slot="tabs"
tabs={desiredRetentionTabs}
bind:value={effectiveDesiredRetention}
/>
<SettingTitle on:click={() => openHelpModal("desiredRetention")}>
{tr.deckConfigDesiredRetention()}
</SettingTitle>
</SpinBoxFloatRow>
</SpinBoxFloatRow>
</Item>
</DynamicallySlottable>
<button
class="btn btn-primary"
on:click={() => {
simulateFsrsRequest.reviewLimit = 9999;
workloadModal?.show();
}}
>
{tr.deckConfigFsrsDesiredRetentionHelpMeDecideExperimental()}
</button>
<Warning warning={desiredRetentionChangeInfo} className={"alert-info two-line"} />
<Warning warning={desiredRetentionWarning} className={retentionWarningClass} />
@ -408,6 +469,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{onPresetChange}
/>
<SimulatorModal
bind:modal={workloadModal}
workload
{state}
{simulateFsrsRequest}
{computing}
{openHelpModal}
{onPresetChange}
/>
<style>
.btn {
margin-bottom: 0.375rem;

View file

@ -13,15 +13,25 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import TableData from "../graphs/TableData.svelte";
import InputBox from "../graphs/InputBox.svelte";
import { defaultGraphBounds, type TableDatum } from "../graphs/graph-helpers";
import { SimulateSubgraph, type Point } from "../graphs/simulator";
import {
SimulateSubgraph,
SimulateWorkloadSubgraph,
type Point,
type WorkloadPoint,
} from "../graphs/simulator";
import * as tr from "@generated/ftl";
import { renderSimulationChart } from "../graphs/simulator";
import { computeOptimalRetention, simulateFsrsReview } from "@generated/backend";
import { renderSimulationChart, renderWorkloadChart } from "../graphs/simulator";
import {
computeOptimalRetention,
simulateFsrsReview,
simulateFsrsWorkload,
} from "@generated/backend";
import { runWithBackendProgress } from "@tslib/progress";
import type {
ComputeOptimalRetentionResponse,
SimulateFsrsReviewRequest,
SimulateFsrsReviewResponse,
SimulateFsrsWorkloadResponse,
} from "@generated/anki/scheduler_pb";
import type { DeckOptionsState } from "./lib";
import SwitchRow from "$lib/components/SwitchRow.svelte";
@ -40,9 +50,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let computing: boolean;
export let openHelpModal: (key: string) => void;
export let onPresetChange: () => void;
    /** Whether this modal simulates workload across desired retention values. Do not modify this once set. */
export let workload: boolean = false;
const config = state.currentConfig;
let simulateSubgraph: SimulateSubgraph = SimulateSubgraph.count;
let simulateWorkloadSubgraph: SimulateWorkloadSubgraph =
SimulateWorkloadSubgraph.ratio;
let tableData: TableDatum[] = [];
let simulating: boolean = false;
const fsrs = state.fsrs;
@ -50,7 +64,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let svg: HTMLElement | SVGElement | null = null;
let simulationNumber = 0;
let points: Point[] = [];
let points: (WorkloadPoint | Point)[] = [];
const newCardsIgnoreReviewLimit = state.newCardsIgnoreReviewLimit;
let smooth = true;
let suspendLeeches = $config.leechAction == DeckConfig_Config_LeechAction.SUSPEND;
@ -177,6 +191,43 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
}
async function simulateWorkload(): Promise<void> {
let resp: SimulateFsrsWorkloadResponse | undefined;
updateRequest();
try {
await runWithBackendProgress(
async () => {
simulating = true;
resp = await simulateFsrsWorkload(simulateFsrsRequest);
},
() => {},
);
} finally {
simulating = false;
if (resp) {
simulationNumber += 1;
points = points.concat(
Object.entries(resp.memorized).map(([dr, v]) => ({
x: parseInt(dr),
timeCost: resp!.cost[dr],
memorized: v,
count: resp!.reviewCount[dr],
label: simulationNumber,
learnSpan: simulateFsrsRequest.daysToSimulate,
})),
);
tableData = renderWorkloadChart(
svg as SVGElement,
bounds,
points as WorkloadPoint[],
simulateWorkloadSubgraph,
);
}
}
}
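    // A minimal sketch of the response shape assumed by the mapping above: each field is a
    // map keyed by desired retention (a stringified percentage), and one point is built per
    // key. The concrete values, and seconds as the unit of `cost`, are assumptions.
    const exampleWorkloadResponse: {
        memorized: Record<string, number>;
        cost: Record<string, number>;
        reviewCount: Record<string, number>;
    } = {
        memorized: { "80": 1200, "90": 1450 },
        cost: { "80": 35000, "90": 52000 },
        reviewCount: { "80": 6000, "90": 9500 },
    };
    const exampleWorkloadPoints = Object.entries(exampleWorkloadResponse.memorized).map(
        ([dr, memorized]) => ({
            x: parseInt(dr), // desired retention in percent
            timeCost: exampleWorkloadResponse.cost[dr],
            memorized,
            count: exampleWorkloadResponse.reviewCount[dr],
            label: 1, // simulation number
            learnSpan: 365, // daysToSimulate
        }),
    );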
function clearSimulation() {
points = points.filter((p) => p.label !== simulationNumber);
simulationNumber = Math.max(0, simulationNumber - 1);
@ -188,6 +239,25 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
);
}
function saveConfigToPreset() {
if (confirm(tr.deckConfigSaveOptionsToPresetConfirm())) {
$config.newPerDay = simulateFsrsRequest.newLimit;
$config.reviewsPerDay = simulateFsrsRequest.reviewLimit;
$config.maximumReviewInterval = simulateFsrsRequest.maxInterval;
if (!workload) {
$config.desiredRetention = simulateFsrsRequest.desiredRetention;
}
$newCardsIgnoreReviewLimit = simulateFsrsRequest.newCardsIgnoreReviewLimit;
$config.reviewOrder = simulateFsrsRequest.reviewOrder;
$config.leechAction = suspendLeeches
? DeckConfig_Config_LeechAction.SUSPEND
: DeckConfig_Config_LeechAction.TAG_ONLY;
$config.leechThreshold = leechThreshold;
$config.easyDaysPercentages = [...easyDayPercentages];
onPresetChange();
}
}
$: if (svg) {
let pointsToRender = points;
if (smooth) {
@ -225,11 +295,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
});
}
tableData = renderSimulationChart(
const render_function = workload ? renderWorkloadChart : renderSimulationChart;
tableData = render_function(
svg as SVGElement,
bounds,
pointsToRender,
simulateSubgraph,
            // This cast should be safe because a given modal instance never switches between workload and simulation modes
pointsToRender as WorkloadPoint[],
(workload ? simulateWorkloadSubgraph : simulateSubgraph) as any as never,
);
}
@ -252,7 +325,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<div class="modal-dialog modal-xl">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">{tr.deckConfigFsrsSimulatorExperimental()}</h5>
<h5 class="modal-title">
{#if workload}
{tr.deckConfigFsrsSimulateDesiredRetentionExperimental()}
{:else}
{tr.deckConfigFsrsSimulatorExperimental()}
{/if}
</h5>
<button
type="button"
class="btn-close"
@ -278,6 +357,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
</SettingTitle>
</SpinBoxRow>
{#if !workload}
<SpinBoxFloatRow
bind:value={simulateFsrsRequest.desiredRetention}
defaultValue={$config.desiredRetention}
@ -285,10 +365,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
max={0.99}
percentage={true}
>
<SettingTitle on:click={() => openHelpModal("desiredRetention")}>
<SettingTitle
on:click={() => openHelpModal("desiredRetention")}
>
{tr.deckConfigDesiredRetention()}
</SettingTitle>
</SpinBoxFloatRow>
{/if}
<SpinBoxRow
bind:value={simulateFsrsRequest.newLimit}
@ -421,10 +504,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{/if}
</details>
</div>
<div>
<button
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
disabled={computing}
on:click={simulateFsrs}
on:click={workload ? simulateWorkload : simulateFsrs}
>
{tr.deckConfigSimulate()}
</button>
@ -440,25 +525,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<button
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
disabled={computing}
on:click={() => {
if (confirm(tr.deckConfigSaveOptionsToPresetConfirm())) {
$config.newPerDay = simulateFsrsRequest.newLimit;
$config.reviewsPerDay = simulateFsrsRequest.reviewLimit;
$config.maximumReviewInterval =
simulateFsrsRequest.maxInterval;
$config.desiredRetention =
simulateFsrsRequest.desiredRetention;
$newCardsIgnoreReviewLimit =
simulateFsrsRequest.newCardsIgnoreReviewLimit;
$config.reviewOrder = simulateFsrsRequest.reviewOrder;
$config.leechAction = suspendLeeches
? DeckConfig_Config_LeechAction.SUSPEND
: DeckConfig_Config_LeechAction.TAG_ONLY;
$config.leechThreshold = leechThreshold;
$config.easyDaysPercentages = [...easyDayPercentages];
onPresetChange();
}
}}
on:click={saveConfigToPreset}
>
{tr.deckConfigSaveOptionsToPreset()}
</button>
@ -466,10 +533,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{#if processing}
{tr.actionsProcessing()}
{/if}
</div>
<Graph>
<div class="radio-group">
<InputBox>
{#if !workload}
<label>
<input
type="radio"
@ -494,6 +563,40 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
/>
{tr.deckConfigFsrsSimulatorRadioMemorized()}
</label>
{:else}
<label>
<input
type="radio"
value={SimulateWorkloadSubgraph.ratio}
bind:group={simulateWorkloadSubgraph}
/>
{tr.deckConfigFsrsSimulatorRadioRatio()}
</label>
<label>
<input
type="radio"
value={SimulateWorkloadSubgraph.count}
bind:group={simulateWorkloadSubgraph}
/>
{tr.deckConfigFsrsSimulatorRadioCount()}
</label>
<label>
<input
type="radio"
value={SimulateWorkloadSubgraph.time}
bind:group={simulateWorkloadSubgraph}
/>
{tr.statisticsReviewsTimeCheckbox()}
</label>
<label>
<input
type="radio"
value={SimulateWorkloadSubgraph.memorized}
bind:group={simulateWorkloadSubgraph}
/>
{tr.deckConfigFsrsSimulatorRadioMemorized()}
</label>
{/if}
</InputBox>
</div>
@ -524,7 +627,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
.svg-container {
width: 100%;
max-height: calc(100vh - 400px); /* Account for modal header, controls, etc */
/* Account for modal header, controls, etc */
max-height: max(calc(100vh - 400px), 200px);
aspect-ratio: 600 / 250;
display: flex;
align-items: center;

View file

@ -23,9 +23,12 @@
<slot />
</Col>
<Col --col-size={6} breakpoint="xs">
<Row class="flex-grow-1">
<slot name="tabs" />
<ConfigInput>
<SpinBox bind:value {min} {max} {step} {percentage} bind:focused />
<RevertButton slot="revert" bind:value {defaultValue} />
</ConfigInput>
</Row>
</Col>
</Row>

View file

@ -55,7 +55,10 @@
width: 100%;
display: flex;
flex-wrap: nowrap;
&:has(li:nth-child(3)) {
justify-content: space-between;
}
justify-content: space-around;
padding-inline: 0;
margin-bottom: 0.5rem;
list-style: none;

View file

@ -8,7 +8,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
// When title is null (default), the graph is inlined rather than wrapped in a TitledContainer.
export let title: string | null = null;
export let subtitle: string | null = null;
export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
</script>
{#if title == null}
@ -19,8 +18,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<slot />
</div>
{:else}
<TitledContainer class="d-flex flex-column" {title} {onTitleClick}>
<slot slot="tooltip" name="tooltip"></slot>
<TitledContainer class="d-flex flex-column" {title}>
<slot name="tooltip" slot="tooltip"></slot>
<div class="graph d-flex flex-grow-1 flex-column justify-content-center">
{#if subtitle}
<div class="subtitle">{subtitle}</div>

View file

@ -47,6 +47,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
white-space: nowrap;
padding: 15px;
border-radius: 5px;
font-family: inherit;
font-size: 15px;
opacity: 0;
pointer-events: none;

View file

@ -57,22 +57,29 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
const title = tr.statisticsTrueRetentionTitle();
const subtitle = tr.statisticsTrueRetentionSubtitle();
const onTitleClick = () => {
const onHelpClick = () => {
openHelpModal(Object.keys(retentionHelp).indexOf("trueRetention"));
};
</script>
<Graph {title} {subtitle} {onTitleClick}>
<Graph {title} {subtitle}>
<div
slot="tooltip"
onclick={onHelpClick}
onkeydown={onHelpClick}
role="button"
tabindex="-1"
>
<HelpModal
title={tr.statisticsTrueRetentionTitle()}
url={HelpPage.DeckOptions.fsrs}
slot="tooltip"
{helpSections}
on:mount={(e) => {
modal = e.detail.modal;
carousel = e.detail.carousel;
}}
/>
</div>
<InputBox>
<label>
<input type="radio" bind:group={mode} value={DisplayMode.Young} />

View file

@ -18,8 +18,8 @@ import {
bin,
cumsum,
curveBasis,
interpolateBlues,
interpolateGreens,
interpolateOranges,
interpolatePurples,
interpolateReds,
max,
@ -181,7 +181,7 @@ export function renderReviews(
const reds = scaleSequential((n) => interpolateReds(cappedRange(n)!)).domain(
x.domain() as any,
);
const blues = scaleSequential((n) => interpolateBlues(cappedRange(n)!)).domain(
const oranges = scaleSequential((n) => interpolateOranges(cappedRange(n)!)).domain(
x.domain() as any,
);
const purples = scaleSequential((n) => interpolatePurples(cappedRange(n)!)).domain(
@ -195,7 +195,7 @@ export function renderReviews(
case BinIndex.Young:
return lighterGreens;
case BinIndex.Learn:
return blues;
return oranges;
case BinIndex.Relearn:
return reds;
case BinIndex.Filtered:

View file

@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import { localizedDate } from "@tslib/i18n";
import { createLocaleNumberFormat, localizedDate } from "@tslib/i18n";
import {
axisBottom,
axisLeft,
@ -31,50 +31,94 @@ export interface Point {
label: number;
}
export type WorkloadPoint = Point & {
learnSpan: number;
};
export enum SimulateSubgraph {
time,
count,
memorized,
}
export enum SimulateWorkloadSubgraph {
ratio,
time,
count,
memorized,
}
export function renderWorkloadChart(
svgElem: SVGElement,
bounds: GraphBounds,
data: WorkloadPoint[],
subgraph: SimulateWorkloadSubgraph,
) {
const xMin = 70;
const xMax = 99;
const x = scaleLinear()
.domain([xMin, xMax])
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
const subgraph_data = ({
[SimulateWorkloadSubgraph.ratio]: data.map(d => ({ ...d, y: d.timeCost / d.memorized })),
[SimulateWorkloadSubgraph.time]: data.map(d => ({ ...d, y: d.timeCost / d.learnSpan })),
[SimulateWorkloadSubgraph.count]: data.map(d => ({ ...d, y: d.count / d.learnSpan })),
[SimulateWorkloadSubgraph.memorized]: data.map(d => ({ ...d, y: d.memorized })),
})[subgraph];
const yTickFormat = (n: number): string => {
return subgraph == SimulateWorkloadSubgraph.time || subgraph == SimulateWorkloadSubgraph.ratio
? timeSpan(n, true)
: n.toString();
};
const formatter = createLocaleNumberFormat({
style: "percent",
minimumFractionDigits: 0,
maximumFractionDigits: 0,
});
const xTickFormat = (n: number) => formatter.format(n / 100);
const formatY: (value: number) => string = ({
[SimulateWorkloadSubgraph.ratio]: (value: number) =>
tr.deckConfigFsrsSimulatorRatioTooltip({ time: timeSpan(value) }),
[SimulateWorkloadSubgraph.time]: (value: number) =>
tr.statisticsMinutesPerDay({ count: parseFloat((value / 60).toPrecision(2)) }),
[SimulateWorkloadSubgraph.count]: (value: number) => tr.statisticsReviewsPerDay({ count: Math.round(value) }),
[SimulateWorkloadSubgraph.memorized]: (value: number) =>
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
})[subgraph];
function formatX(dr: number) {
return `${tr.deckConfigDesiredRetention()}: ${xTickFormat(dr)}<br>`;
}
return _renderSimulationChart(
svgElem,
bounds,
subgraph_data,
x,
formatY,
formatX,
(_e: MouseEvent, _d: number) => undefined,
yTickFormat,
xTickFormat,
);
}
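// A small usage sketch with assumed numbers, matching the fields read above: for the
// "ratio" subgraph the chart plots timeCost / memorized (seconds per memorized card),
// e.g. 35000 / 1200 ≈ 29 s at 80% desired retention versus 52000 / 1450 ≈ 36 s at 90%;
// the "time" and "count" subgraphs divide by learnSpan for per-day averages instead.
const sketchWorkloadPoints = [
    { x: 80, timeCost: 35000, memorized: 1200, count: 6000, label: 1, learnSpan: 365 },
    { x: 90, timeCost: 52000, memorized: 1450, count: 9500, label: 1, learnSpan: 365 },
];
// renderWorkloadChart(svgElem, bounds, sketchWorkloadPoints, SimulateWorkloadSubgraph.ratio);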
export function renderSimulationChart(
svgElem: SVGElement,
bounds: GraphBounds,
data: Point[],
subgraph: SimulateSubgraph,
): TableDatum[] {
const svg = select(svgElem);
svg.selectAll(".lines").remove();
svg.selectAll(".hover-columns").remove();
svg.selectAll(".focus-line").remove();
svg.selectAll(".legend").remove();
if (data.length == 0) {
setDataAvailable(svg, false);
return [];
}
const trans = svg.transition().duration(600) as any;
// Prepare data
const today = new Date();
const convertedData = data.map(d => ({
...d,
date: new Date(today.getTime() + d.x * 24 * 60 * 60 * 1000),
x: new Date(today.getTime() + d.x * 24 * 60 * 60 * 1000),
}));
const xMin = today;
const xMax = max(convertedData, d => d.date);
const x = scaleTime()
.domain([xMin, xMax!])
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
svg.select<SVGGElement>(".x-ticks")
.call((selection) => selection.transition(trans).call(axisBottom(x).ticks(7).tickSizeOuter(0)))
.attr("direction", "ltr");
// y scale
const yTickFormat = (n: number): string => {
return subgraph == SimulateSubgraph.time ? timeSpan(n, true) : n.toString();
};
const subgraph_data = ({
[SimulateSubgraph.count]: convertedData.map(d => ({ ...d, y: d.count })),
@ -82,6 +126,94 @@ export function renderSimulationChart(
[SimulateSubgraph.memorized]: convertedData.map(d => ({ ...d, y: d.memorized })),
})[subgraph];
const xMin = today;
const xMax = max(subgraph_data, d => d.x);
const x = scaleTime()
.domain([xMin, xMax!])
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
const yTickFormat = (n: number): string => {
return subgraph == SimulateSubgraph.time ? timeSpan(n, true) : n.toString();
};
const formatY: (value: number) => string = ({
[SimulateSubgraph.time]: timeSpan,
[SimulateSubgraph.count]: (value: number) => tr.statisticsReviews({ reviews: Math.round(value) }),
[SimulateSubgraph.memorized]: (value: number) =>
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
})[subgraph];
const perDay = ({
[SimulateSubgraph.count]: tr.statisticsReviewsPerDay,
[SimulateSubgraph.time]: ({ count }: { count: number }) => timeSpan(count),
[SimulateSubgraph.memorized]: tr.statisticsCardsPerDay,
})[subgraph];
function legendMouseMove(e: MouseEvent, d: number) {
const data = subgraph_data.filter(datum => datum.label == d);
const total = subgraph == SimulateSubgraph.memorized
? data[data.length - 1].memorized - data[0].memorized
: sumBy(data, d => d.y);
const average = total / (data?.length || 1);
showTooltip(
`#${d}:<br/>
${tr.statisticsAverage()}: ${perDay({ count: average })}<br/>
${tr.statisticsTotal()}: ${formatY(total)}`,
e.pageX,
e.pageY,
);
}
function formatX(date: Date) {
const days = +((date.getTime() - Date.now()) / (60 * 60 * 24 * 1000)).toFixed();
return `Date: ${localizedDate(date)}<br>In ${days} Days<br>`;
}
return _renderSimulationChart(
svgElem,
bounds,
subgraph_data,
x,
formatY,
formatX,
legendMouseMove,
yTickFormat,
undefined,
);
}
function _renderSimulationChart<T extends { x: any; y: any; label: number }>(
svgElem: SVGElement,
bounds: GraphBounds,
subgraph_data: T[],
x: any,
formatY: (n: T["y"]) => string,
formatX: (n: T["x"]) => string,
legendMouseMove: (e: MouseEvent, d: number) => void,
yTickFormat?: (n: number) => string,
xTickFormat?: (n: number) => string,
): TableDatum[] {
const svg = select(svgElem);
svg.selectAll(".lines").remove();
svg.selectAll(".hover-columns").remove();
svg.selectAll(".focus-line").remove();
svg.selectAll(".legend").remove();
if (subgraph_data.length == 0) {
setDataAvailable(svg, false);
return [];
}
const trans = svg.transition().duration(600) as any;
svg.select<SVGGElement>(".x-ticks")
.call((selection) =>
selection.transition(trans).call(axisBottom(x).ticks(7).tickSizeOuter(0).tickFormat(xTickFormat as any))
)
.attr("direction", "ltr");
// y scale
const yMax = max(subgraph_data, d => d.y)!;
const y = scaleLinear()
.range([bounds.height - bounds.marginBottom, bounds.marginTop])
@ -110,7 +242,7 @@ export function renderSimulationChart(
.attr("fill", "currentColor");
// x lines
const points = subgraph_data.map((d) => [x(d.date), y(d.y), d.label]);
const points = subgraph_data.map((d) => [x(d.x), y(d.y), d.label]);
const groups = rollup(points, v => Object.assign(v, { z: v[0][2] }), d => d[2]);
const color = schemeCategory10;
@ -157,13 +289,6 @@ export function renderSimulationChart(
hideTooltip();
});
const formatY: (value: number) => string = ({
[SimulateSubgraph.time]: timeSpan,
[SimulateSubgraph.count]: (value: number) => tr.statisticsReviews({ reviews: Math.round(value) }),
[SimulateSubgraph.memorized]: (value: number) =>
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
})[subgraph];
function mousemove(event: MouseEvent, d: any): void {
pointer(event, document.body);
const date = x.invert(d[0]);
@ -182,8 +307,7 @@ export function renderSimulationChart(
focusLine.attr("x1", d[0]).attr("x2", d[0]).style("opacity", 1);
const days = +((date.getTime() - Date.now()) / (60 * 60 * 24 * 1000)).toFixed();
let tooltipContent = `Date: ${localizedDate(date)}<br>In ${days} Days<br>`;
let tooltipContent = formatX(date);
for (const [key, value] of Object.entries(groupData)) {
const path = svg.select(`path[data-group="${key}"]`);
const hidden = path.classed("hidden");
@ -212,29 +336,6 @@ export function renderSimulationChart(
.on("mousemove", legendMouseMove)
.on("mouseout", hideTooltip);
const perDay = ({
[SimulateSubgraph.count]: tr.statisticsReviewsPerDay,
[SimulateSubgraph.time]: ({ count }: { count: number }) => timeSpan(count),
[SimulateSubgraph.memorized]: tr.statisticsCardsPerDay,
})[subgraph];
function legendMouseMove(e: MouseEvent, d: number) {
const data = subgraph_data.filter(datum => datum.label == d);
const total = subgraph == SimulateSubgraph.memorized
? data[data.length - 1].memorized - data[0].memorized
: sumBy(data, d => d.y);
const average = total / (data?.length || 1);
showTooltip(
`#${d}:<br/>
${tr.statisticsAverage()}: ${perDay({ count: average })}<br/>
${tr.statisticsTotal()}: ${formatY(total)}`,
e.pageX,
e.pageY,
);
}
legend.append("rect")
.attr("x", bounds.width - bounds.marginRight + 36)
.attr("width", 12)

Some files were not shown because too many files have changed in this diff.