Merge branch 'main' into editor-3830
commit 93f38924a6
60 changed files with 1088 additions and 601 deletions
.version (2 lines changed)
@@ -1 +1 @@
-25.07.5
+25.08b5
@@ -236,6 +236,7 @@ Marvin Kopf <marvinkopf@outlook.com>
 Kevin Nakamura <grinkers@grinkers.net>
 Bradley Szoke <bradleyszoke@gmail.com>
 jcznk <https://github.com/jcznk>
+Thomas Rixen <thomas.rixen@student.uclouvain.be>

 ********************
Cargo.lock (generated, 47 lines changed)
@@ -130,7 +130,8 @@ dependencies = [
 "prost",
 "prost-reflect",
 "pulldown-cmark 0.13.0",
-"rand 0.9.1",
+"rand 0.9.2",
+"rayon",
 "regex",
 "reqwest 0.12.20",
 "rusqlite",
@@ -143,7 +144,7 @@ dependencies = [
 "serde_tuple",
 "sha1",
 "snafu",
-"strum 0.27.1",
+"strum 0.27.2",
 "syn 2.0.103",
 "tempfile",
 "tokio",
@@ -219,7 +220,7 @@ dependencies = [
 "prost-types",
 "serde",
 "snafu",
-"strum 0.27.1",
+"strum 0.27.2",
 ]

 [[package]]
@@ -705,7 +706,7 @@ dependencies = [
 "log",
 "num-traits",
 "portable-atomic-util",
-"rand 0.9.1",
+"rand 0.9.2",
 "rmp-serde",
 "serde",
 "serde_json",
@@ -731,7 +732,7 @@ dependencies = [
 "hashbrown 0.15.4",
 "log",
 "num-traits",
-"rand 0.9.1",
+"rand 0.9.2",
 "serde",
 "spin 0.10.0",
 "text_placeholder",
@@ -761,12 +762,12 @@ dependencies = [
 "csv",
 "derive-new 0.7.0",
 "dirs 6.0.0",
-"rand 0.9.1",
+"rand 0.9.2",
 "rmp-serde",
 "sanitize-filename 0.6.0",
 "serde",
 "serde_json",
-"strum 0.27.1",
+"strum 0.27.2",
 "tempfile",
 "thiserror 2.0.12",
 ]
@@ -816,7 +817,7 @@ dependencies = [
 "num-traits",
 "paste",
 "portable-atomic-util",
-"rand 0.9.1",
+"rand 0.9.2",
 "seq-macro",
 "spin 0.10.0",
 ]
@@ -864,7 +865,7 @@ dependencies = [
 "half",
 "hashbrown 0.15.4",
 "num-traits",
-"rand 0.9.1",
+"rand 0.9.2",
 "rand_distr",
 "serde",
 "serde_bytes",
@@ -958,7 +959,7 @@ dependencies = [
 "memmap2",
 "num-traits",
 "num_cpus",
-"rand 0.9.1",
+"rand 0.9.2",
 "rand_distr",
 "rayon",
 "safetensors",
@@ -1402,7 +1403,7 @@ dependencies = [
 "log",
 "num-traits",
 "portable-atomic",
-"rand 0.9.1",
+"rand 0.9.2",
 "sanitize-filename 0.5.0",
 "serde",
 "serde_json",
@@ -2213,20 +2214,20 @@ dependencies = [

 [[package]]
 name = "fsrs"
-version = "4.1.1"
+version = "5.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1f3a8c3df2c324ebab71461178fe8c1fe2d7373cf603f312b652befd026f06d"
+checksum = "04954cc67c3c11ee342a2ee1f5222bf76d73f7772df08d37dc9a6cdd73c467eb"
 dependencies = [
 "burn",
 "itertools 0.14.0",
 "log",
 "ndarray",
 "priority-queue",
-"rand 0.9.1",
+"rand 0.9.2",
 "rayon",
 "serde",
 "snafu",
-"strum 0.27.1",
+"strum 0.27.2",
 ]

 [[package]]
@@ -2803,7 +2804,7 @@ dependencies = [
 "cfg-if",
 "crunchy",
 "num-traits",
-"rand 0.9.1",
+"rand 0.9.2",
 "rand_distr",
 "serde",
 ]
@@ -3657,7 +3658,7 @@ dependencies = [
 "linkcheck",
 "regex",
 "reqwest 0.12.20",
-"strum 0.27.1",
+"strum 0.27.2",
 "tokio",
 ]

@@ -5095,7 +5096,7 @@ dependencies = [
 "bytes",
 "getrandom 0.3.3",
 "lru-slab",
-"rand 0.9.1",
+"rand 0.9.2",
 "ring",
 "rustc-hash 2.1.1",
 "rustls",
@@ -5149,9 +5150,9 @@ dependencies = [

 [[package]]
 name = "rand"
-version = "0.9.1"
+version = "0.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
 dependencies = [
 "rand_chacha 0.9.0",
 "rand_core 0.9.3",
@@ -5202,7 +5203,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463"
 dependencies = [
 "num-traits",
-"rand 0.9.1",
+"rand 0.9.2",
 ]

 [[package]]
@@ -6112,9 +6113,9 @@ dependencies = [

 [[package]]
 name = "strum"
-version = "0.27.1"
+version = "0.27.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
+checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
 dependencies = [
 "strum_macros 0.27.1",
 ]
@@ -33,9 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
 rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

 [workspace.dependencies.fsrs]
-version = "4.1.1"
+version = "5.1.0"
 # git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
-# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
 # path = "../open-spaced-repetition/fsrs-rs"

 [workspace.dependencies]
@@ -110,6 +109,7 @@ prost-types = "0.13"
 pulldown-cmark = "0.13.0"
 pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
 rand = "0.9.1"
+rayon = "1.10.0"
 regex = "1.11.1"
 reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
 rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@@ -1450,7 +1450,7 @@
   },
   {
     "name": "fsrs",
-    "version": "4.1.1",
+    "version": "5.1.0",
     "authors": "Open Spaced Repetition",
     "repository": "https://github.com/open-spaced-repetition/fsrs-rs",
     "license": "BSD-3-Clause",
@@ -3322,7 +3322,7 @@
   },
   {
     "name": "rand",
-    "version": "0.9.1",
+    "version": "0.9.2",
     "authors": "The Rand Project Developers|The Rust Project Developers",
     "repository": "https://github.com/rust-random/rand",
     "license": "Apache-2.0 OR MIT",
@@ -4132,7 +4132,7 @@
   },
   {
     "name": "strum",
-    "version": "0.27.1",
+    "version": "0.27.2",
     "authors": "Peter Glotfelty <peter.glotfelty@microsoft.com>",
     "repository": "https://github.com/Peternator7/strum",
     "license": "MIT",
@@ -1 +1 @@
-Subproject commit 939298f7c461407951988f362b1a08b451336a1e
+Subproject commit a599715d3c27ff2eb895c749f3534ab73d83dad1
@@ -5,6 +5,11 @@ database-check-card-properties =
         [one] Fixed { $count } invalid card property.
        *[other] Fixed { $count } invalid card properties.
     }
+database-check-card-last-review-time-empty =
+    { $count ->
+        [one] Added last review time to { $count } card.
+       *[other] Added last review time to { $count } cards.
+    }
 database-check-missing-templates =
     { $count ->
         [one] Deleted { $count } card with missing template.
@@ -505,7 +505,9 @@ deck-config-desired-retention-below-optimal = Your desired retention is below optimal
 # Description of the y axis in the FSRS simulation
 # diagram (Deck options -> FSRS) showing the total number of
 # cards that can be recalled or retrieved on a specific date.
-deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
+deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
+deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
+deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
 deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
 deck-config-simulate = Simulate
 deck-config-clear-last-simulate = Clear Last Simulation
@@ -519,6 +521,9 @@ deck-config-save-options-to-preset-confirm = Overwrite the options in your curre
 # to show the total number of cards that can be recalled or retrieved on a
 # specific date.
 deck-config-fsrs-simulator-radio-memorized = Memorized
+deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
+# $time here is pre-formatted e.g. "10 Seconds"
+deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card

 ## Messages related to the FSRS scheduler’s health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.

@@ -539,6 +544,7 @@ deck-config-fsrs-good-fit = Health Check:

 ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

+deck-config-plotted-on-x-axis = (Plotted on the X-axis)
 deck-config-a-100-day-interval =
     { $days ->
         [one] A 100 day interval will become { $days } day.
@@ -1 +1 @@
-Subproject commit bc2da83c77749d96f3df8144f00c87d68dd2187a
+Subproject commit bb4207f3b8e9a7c428db282d12c75b850be532f3
@@ -40,12 +40,10 @@ message DeckConfigId {
 message GetRetentionWorkloadRequest {
   repeated float w = 1;
   string search = 2;
-  float before = 3;
-  float after = 4;
 }

 message GetRetentionWorkloadResponse {
-  float factor = 1;
+  map<uint32, float> costs = 1;
 }

 message GetIgnoredBeforeCountRequest {
@@ -219,6 +217,8 @@ message DeckConfigsForUpdate {
     bool review_today_active = 5;
     // Whether new_today applies to today or a past day.
     bool new_today_active = 6;
+    // Deck-specific desired retention override
+    optional float desired_retention = 7;
   }
   string name = 1;
   int64 config_id = 2;
@@ -83,6 +83,8 @@ message Deck {
     optional uint32 new_limit = 7;
     DayLimit review_limit_today = 8;
     DayLimit new_limit_today = 9;
+    // Deck-specific desired retention override
+    optional float desired_retention = 10;

     reserved 12 to 15;
   }
@@ -55,6 +55,8 @@ service SchedulerService {
       returns (ComputeOptimalRetentionResponse);
   rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
       returns (SimulateFsrsReviewResponse);
+  rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
+      returns (SimulateFsrsWorkloadResponse);
   rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
   rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
       returns (EvaluateParamsResponse);
@@ -405,6 +407,8 @@ message SimulateFsrsReviewRequest {
   deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
   optional uint32 suspend_after_lapse_count = 12;
   float historical_retention = 13;
+  uint32 learning_step_count = 14;
+  uint32 relearning_step_count = 15;
 }

 message SimulateFsrsReviewResponse {
@@ -414,6 +418,12 @@ message SimulateFsrsReviewResponse {
   repeated float daily_time_cost = 4;
 }

+message SimulateFsrsWorkloadResponse {
+  map<uint32, float> cost = 1;
+  map<uint32, float> memorized = 2;
+  map<uint32, uint32> review_count = 3;
+}
+
 message ComputeOptimalRetentionResponse {
   float optimal_retention = 1;
 }
@@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
         return BackendError(err.message, help_page, context, backtrace)

     elif val == kind.SEARCH_ERROR:
-        return SearchError(markdown(err.message), help_page, context, backtrace)
+        return SearchError(err.message, help_page, context, backtrace)

     elif val == kind.UNDO_EMPTY:
         return UndoEmpty(err.message, help_page, context, backtrace)
@@ -73,7 +73,7 @@ langs = sorted(
         ("ଓଡ଼ିଆ", "or_OR"),
         ("Filipino", "tl"),
         ("ئۇيغۇر", "ug"),
-        ("Oʻzbek", "uz_UZ"),
+        ("Oʻzbekcha", "uz_UZ"),
     ]
 )
@@ -10,6 +10,8 @@ import re
 from collections.abc import Callable, Sequence
 from typing import Any, cast

+from markdown import markdown
+
 import aqt
 import aqt.browser
 import aqt.editor
@@ -20,6 +22,7 @@ from anki.cards import Card, CardId
 from anki.collection import Collection, Config, OpChanges, SearchNode
 from anki.consts import *
 from anki.decks import DeckId
+from anki.errors import SearchError
 from anki.lang import without_unicode_isolation
 from anki.models import NotetypeId
 from anki.notes import NoteId
@@ -488,6 +491,8 @@ class Browser(QMainWindow):
         text = self.current_search()
         try:
             normed = self.col.build_search_string(text)
+        except SearchError as err:
+            showWarning(markdown(str(err)))
         except Exception as err:
             showWarning(str(err))
         else:
@@ -999,6 +999,7 @@ exposed_backend_list = [
     "evaluate_params_legacy",
     "get_optimal_retention_parameters",
     "simulate_fsrs_review",
+    "simulate_fsrs_workload",
     # DeckConfigService
     "get_ignored_before_count",
     "get_retention_workload",
@@ -147,6 +147,7 @@ def update_and_restart() -> None:

     with contextlib.suppress(ResourceWarning):
         env = os.environ.copy()
+        env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
         # fixes a bug where launcher fails to appear if opening it
         # straight after updating
         if "GNOME_TERMINAL_SCREEN" in env:
@@ -156,12 +157,15 @@ def update_and_restart() -> None:
             creationflags = (
                 subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
             )
+        # On Windows 10, changing the handles breaks ANSI display
+        io = None if sys.platform == "win32" else subprocess.DEVNULL

         subprocess.Popen(
             [launcher],
             start_new_session=True,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
+            stdin=io,
+            stdout=io,
+            stderr=io,
             env=env,
             creationflags=creationflags,
         )
@@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
         if out.new_endpoint:
             mw.pm.set_current_sync_url(out.new_endpoint)
         if out.server_message:
-            showText(out.server_message)
+            showText(out.server_message, parent=mw)
         if out.required == out.NO_CHANGES:
             tooltip(parent=mw, msg=tr.sync_collection_complete())
             # all done; track media progress
@@ -90,17 +90,21 @@ def update_and_restart() -> None:

     with contextlib.suppress(ResourceWarning):
         env = os.environ.copy()
+        env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
         creationflags = 0
         if sys.platform == "win32":
             creationflags = (
                 subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
             )
+        # On Windows, changing the handles breaks ANSI display
+        io = None if sys.platform == "win32" else subprocess.DEVNULL

         subprocess.Popen(
             [launcher],
             start_new_session=True,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
+            stdin=io,
+            stdout=io,
+            stderr=io,
             env=env,
             creationflags=creationflags,
         )
@@ -46,6 +46,7 @@ struct State {
     uv_lock_path: std::path::PathBuf,
     sync_complete_marker: std::path::PathBuf,
     launcher_trigger_file: std::path::PathBuf,
+    mirror_path: std::path::PathBuf,
     pyproject_modified_by_user: bool,
     previous_version: Option<String>,
     resources_dir: std::path::PathBuf,
@@ -71,6 +72,7 @@ pub enum MainMenuChoice {
     Version(VersionKind),
     ToggleBetas,
     ToggleCache,
+    DownloadMirror,
     Uninstall,
 }

@@ -108,6 +110,7 @@ fn run() -> Result<()> {
         uv_lock_path: uv_install_root.join("uv.lock"),
         sync_complete_marker: uv_install_root.join(".sync_complete"),
         launcher_trigger_file: uv_install_root.join(".want-launcher"),
+        mirror_path: uv_install_root.join("mirror"),
         pyproject_modified_by_user: false, // calculated later
         previous_version: None,
         resources_dir,
@@ -155,12 +158,7 @@ fn run() -> Result<()> {

     check_versions(&mut state);

-    let first_run = !state.venv_folder.exists();
-    if first_run {
-        handle_version_install_or_update(&state, MainMenuChoice::Latest)?;
-    } else {
-        main_menu_loop(&state)?;
-    }
+    main_menu_loop(&state)?;

     // Write marker file to indicate we've completed the sync process
     write_sync_marker(&state)?;
@@ -379,6 +377,11 @@ fn main_menu_loop(state: &State) -> Result<()> {
                 println!();
                 continue;
             }
+            MainMenuChoice::DownloadMirror => {
+                show_mirror_submenu(state)?;
+                println!();
+                continue;
+            }
             MainMenuChoice::Uninstall => {
                 if handle_uninstall(state)? {
                     std::process::exit(0);
@@ -443,8 +446,13 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
         "6) Cache downloads: {}",
         if cache_enabled { "on" } else { "off" }
     );
+    let mirror_enabled = is_mirror_enabled(state);
+    println!(
+        "7) Download mirror: {}",
+        if mirror_enabled { "on" } else { "off" }
+    );
     println!();
-    println!("7) Uninstall");
+    println!("8) Uninstall");
     print!("> ");
     let _ = stdout().flush();

@@ -483,7 +491,8 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
             }
             "5" => MainMenuChoice::ToggleBetas,
             "6" => MainMenuChoice::ToggleCache,
-            "7" => MainMenuChoice::Uninstall,
+            "7" => MainMenuChoice::DownloadMirror,
+            "8" => MainMenuChoice::Uninstall,
             _ => {
                 println!("Invalid input. Please try again.");
                 continue;
@@ -652,7 +661,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
     let mut cmd = Command::new(&state.uv_path);
     cmd.current_dir(&state.uv_install_root)
         .args(["run", "--no-project", "--no-config", "--managed-python"])
-        .args(["--with", "pip-system-certs"]);
+        .args(["--with", "pip-system-certs,requests[socks]"]);

     let python_version = read_file(&state.dist_python_version_path)?;
     let python_version_str =
@@ -716,7 +725,15 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
             &format!("anki-release=={version}\",\n    \"anki=={version}\",\n    \"aqt=={version}"),
         ),
     };
-    write_file(&state.user_pyproject_path, &updated_content)?;
+
+    // Add mirror configuration if enabled
+    let final_content = if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
+        format!("{updated_content}\n\n[[tool.uv.index]]\nname = \"mirror\"\nurl = \"{pypi_mirror}\"\ndefault = true\n\n[tool.uv]\npython-install-mirror = \"{python_mirror}\"\n")
+    } else {
+        updated_content
+    };
+
+    write_file(&state.user_pyproject_path, &final_content)?;

     // Update .python-version based on version kind
     match version_kind {
@@ -750,6 +767,9 @@ fn update_pyproject_for_version(menu_choice: MainMenuChoice, state: &State) -> R
         MainMenuChoice::ToggleCache => {
             unreachable!();
         }
+        MainMenuChoice::DownloadMirror => {
+            unreachable!();
+        }
         MainMenuChoice::Uninstall => {
             unreachable!();
         }
@@ -939,6 +959,70 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
     Ok(cmd)
 }

+fn is_mirror_enabled(state: &State) -> bool {
+    state.mirror_path.exists()
+}
+
+fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
+    if !state.mirror_path.exists() {
+        return Ok(None);
+    }
+
+    let content = read_file(&state.mirror_path)?;
+    let content_str = String::from_utf8(content).context("Invalid UTF-8 in mirror file")?;
+
+    let lines: Vec<&str> = content_str.lines().collect();
+    if lines.len() >= 2 {
+        Ok(Some((
+            lines[0].trim().to_string(),
+            lines[1].trim().to_string(),
+        )))
+    } else {
+        Ok(None)
+    }
+}
+
+fn show_mirror_submenu(state: &State) -> Result<()> {
+    loop {
+        println!("Download mirror options:");
+        println!("1) No mirror");
+        println!("2) China");
+        print!("> ");
+        let _ = stdout().flush();
+
+        let mut input = String::new();
+        let _ = stdin().read_line(&mut input);
+        let input = input.trim();
+
+        match input {
+            "1" => {
+                // Remove mirror file
+                if state.mirror_path.exists() {
+                    let _ = remove_file(&state.mirror_path);
+                }
+                println!("Mirror disabled.");
+                break;
+            }
+            "2" => {
+                // Write China mirror URLs
+                let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
+                write_file(&state.mirror_path, china_mirrors)?;
+                println!("China mirror enabled.");
+                break;
+            }
+            "" => {
+                // Empty input - return to main menu
+                break;
+            }
+            _ => {
+                println!("Invalid input. Please try again.");
+                continue;
+            }
+        }
+    }
+    Ok(())
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
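The mirror option above persists its state as a plain two-line file (first line: python-build-standalone mirror, second line: PyPI index URL), which apply_version_kind then splices into the generated pyproject as a [[tool.uv.index]] entry. The following is a minimal standalone sketch of that file format and its parsing, using only the standard library; the function and file names here are illustrative, not the launcher's real helpers:

    use std::fs;
    use std::path::Path;

    // Reads an optional two-line mirror file: line 1 = Python install mirror,
    // line 2 = PyPI index URL. Returns None if the file is missing or malformed.
    fn read_mirror_urls(path: &Path) -> std::io::Result<Option<(String, String)>> {
        if !path.exists() {
            return Ok(None);
        }
        let content = fs::read_to_string(path)?;
        let mut lines = content.lines();
        match (lines.next(), lines.next()) {
            (Some(python), Some(pypi)) => {
                Ok(Some((python.trim().to_string(), pypi.trim().to_string())))
            }
            _ => Ok(None),
        }
    }

    fn main() -> std::io::Result<()> {
        // Assumes a file named "mirror" in the current directory, mirroring the
        // launcher's uv_install_root/mirror location.
        match read_mirror_urls(Path::new("mirror"))? {
            Some((python, pypi)) => println!("python mirror: {python}\npypi mirror: {pypi}"),
            None => println!("no mirror configured"),
        }
        Ok(())
    }

Deleting the file (menu option 1) disables the mirror; writing the two China URLs (option 2) enables it.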
@@ -116,8 +116,9 @@ pub use windows::ensure_terminal_shown;
 pub fn ensure_terminal_shown() -> Result<()> {
     use std::io::IsTerminal;

+    let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
     let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
-    if !stdout_is_terminal {
+    if want_terminal || !stdout_is_terminal {
         #[cfg(target_os = "macos")]
         mac::relaunch_in_terminal()?;
         #[cfg(not(target_os = "macos"))]
@@ -3,9 +3,9 @@

 import json
 import sys
-import urllib.request

 import pip_system_certs.wrapt_requests
+import requests

 pip_system_certs.wrapt_requests.inject_truststore()

@@ -15,25 +15,26 @@ def main():
     url = "https://pypi.org/pypi/aqt/json"

     try:
-        with urllib.request.urlopen(url, timeout=30) as response:
-            data = json.loads(response.read().decode("utf-8"))
-            releases = data.get("releases", {})
+        response = requests.get(url, timeout=30)
+        response.raise_for_status()
+        data = response.json()
+        releases = data.get("releases", {})

-            # Create list of (version, upload_time) tuples
-            version_times = []
-            for version, files in releases.items():
-                if files:  # Only include versions that have files
-                    # Use the upload time of the first file for each version
-                    upload_time = files[0].get("upload_time_iso_8601")
-                    if upload_time:
-                        version_times.append((version, upload_time))
+        # Create list of (version, upload_time) tuples
+        version_times = []
+        for version, files in releases.items():
+            if files:  # Only include versions that have files
+                # Use the upload time of the first file for each version
+                upload_time = files[0].get("upload_time_iso_8601")
+                if upload_time:
+                    version_times.append((version, upload_time))

-            # Sort by upload time
-            version_times.sort(key=lambda x: x[1])
+        # Sort by upload time
+        version_times.sort(key=lambda x: x[1])

-            # Extract just the version names
-            versions = [version for version, _ in version_times]
-            print(json.dumps(versions))
+        # Extract just the version names
+        versions = [version for version, _ in version_times]
+        print(json.dumps(versions))
     except Exception as e:
         print(f"Error fetching versions: {e}", file=sys.stderr)
         sys.exit(1)
@@ -81,6 +81,7 @@ pin-project.workspace = true
 prost.workspace = true
 pulldown-cmark.workspace = true
 rand.workspace = true
+rayon.workspace = true
 regex.workspace = true
 reqwest.workspace = true
 rusqlite.workspace = true
@@ -105,7 +105,8 @@ impl Card {

     /// Returns true if the card has a due date in terms of days.
     fn is_due_in_days(&self) -> bool {
-        matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
+        self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
+            || matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
+            || (self.ctype == CardType::Review && self.is_undue_queue())
     }

@@ -125,21 +126,20 @@ impl Card {
         }
     }

-    /// This uses card.due and card.ivl to infer the elapsed time. If 'set due
-    /// date' or an add-on has changed the due date, this won't be accurate.
-    pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
+    /// If last_review_date isn't stored in the card, this uses card.due and
+    /// card.ivl to infer the elapsed time, which won't be accurate if
+    /// 'set due date' or an add-on has changed the due date.
+    pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
         if let Some(last_review_time) = self.last_review_time {
-            Some(timing.next_day_at.elapsed_days_since(last_review_time) as u32)
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
         } else if !self.is_due_in_days() {
-            Some(
-                (timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
-                    / 86_400,
-            )
+            let last_review_time =
+                TimestampSecs(self.original_or_current_due() as i64 - self.interval as i64);
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
         } else {
             self.due_time(timing).map(|due| {
                 (due.adding_secs(-86_400 * self.interval as i64)
-                    .elapsed_secs()
-                    / 86_400) as u32
+                    .elapsed_secs()) as u32
             })
         }
     }
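For day-based cards with no stored last_review_time, the new seconds_since_last_review infers the last review moment by stepping back from the due date by the scheduled interval. A small standalone sketch of that arithmetic with plain integers (simplified stand-ins, not the real Card/SchedTimingToday types):

    // Sketch of the fallback: when no last_review_time is stored, infer it
    // from the due timestamp and the scheduled interval, then measure from "now".
    fn inferred_elapsed_secs(due_secs: i64, interval_days: i64, now_secs: i64) -> i64 {
        // last review ≈ due date minus the scheduled interval
        let last_review = due_secs - interval_days * 86_400;
        now_secs - last_review
    }

    fn main() {
        let now = 1_700_000_000; // "now" as a unix timestamp
        let due = now + 3 * 86_400; // card is due in 3 days
        let interval = 10; // scheduled interval of 10 days
        // => the last review was roughly 7 days ago
        println!("elapsed secs: {}", inferred_elapsed_secs(due, interval, now));
    }

Working in seconds rather than whole days is what lets the callers below (browser table, stats, graphs) switch to the seconds-based retrievability APIs.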
@@ -543,12 +543,12 @@ impl RowContext {
         self.cards[0]
             .memory_state
             .as_ref()
-            .zip(self.cards[0].days_since_last_review(&self.timing))
+            .zip(self.cards[0].seconds_since_last_review(&self.timing))
             .zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
-            .map(|((state, days_elapsed), decay)| {
-                let r = FSRS::new(None).unwrap().current_retrievability(
+            .map(|((state, seconds), decay)| {
+                let r = FSRS::new(None).unwrap().current_retrievability_seconds(
                     (*state).into(),
-                    days_elapsed,
+                    seconds,
                     decay,
                 );
                 format!("{:.0}%", r * 100.)
@@ -24,6 +24,7 @@ use crate::notetype::NotetypeId;
 use crate::notetype::NotetypeKind;
 use crate::prelude::*;
 use crate::progress::ThrottlingProgressHandler;
+use crate::storage::card::CardFixStats;
 use crate::timestamp::TimestampMillis;
 use crate::timestamp::TimestampSecs;

@@ -40,6 +41,7 @@ pub struct CheckDatabaseOutput {
     notetypes_recovered: usize,
     invalid_utf8: usize,
     invalid_ids: usize,
+    card_last_review_time_empty: usize,
 }

 #[derive(Debug, Clone, Copy, Default)]
@@ -69,6 +71,11 @@ impl CheckDatabaseOutput {
         if self.card_properties_invalid > 0 {
             probs.push(tr.database_check_card_properties(self.card_properties_invalid));
         }
+        if self.card_last_review_time_empty > 0 {
+            probs.push(
+                tr.database_check_card_last_review_time_empty(self.card_last_review_time_empty),
+            );
+        }
         if self.cards_missing_note > 0 {
             probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
         }
@@ -158,14 +165,25 @@ impl Collection {

     fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
         let timing = self.timing_today()?;
-        let (new_cnt, other_cnt) = self.storage.fix_card_properties(
+        let CardFixStats {
+            new_cards_fixed,
+            other_cards_fixed,
+            last_review_time_fixed,
+        } = self.storage.fix_card_properties(
             timing.days_elapsed,
             TimestampSecs::now(),
             self.usn()?,
             self.scheduler_version() == SchedulerVersion::V1,
         )?;
-        out.card_position_too_high = new_cnt;
-        out.card_properties_invalid += other_cnt;
+        out.card_position_too_high = new_cards_fixed;
+        out.card_properties_invalid += other_cards_fixed;
+        out.card_last_review_time_empty = last_review_time_fixed;
+
+        // Trigger one-way sync if last_review_time was updated to avoid conflicts
+        if last_review_time_fixed > 0 {
+            self.set_schema_modified()?;
+        }
+
         Ok(())
     }
@@ -1,6 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use std::collections::HashMap;
+
 use anki_proto::generic;
+use rayon::iter::IntoParallelIterator;
+use rayon::iter::ParallelIterator;

 use crate::collection::Collection;
 use crate::deckconfig::DeckConfSchema11;
@@ -9,6 +13,7 @@ use crate::deckconfig::DeckConfigId;
 use crate::deckconfig::UpdateDeckConfigsRequest;
 use crate::error::Result;
 use crate::scheduler::fsrs::params::ignore_revlogs_before_date_to_ms;
+use crate::scheduler::fsrs::simulator::is_included_card;

 impl crate::services::DeckConfigService for Collection {
     fn add_or_update_deck_config_legacy(
@@ -101,68 +106,41 @@ impl crate::services::DeckConfigService for Collection {
         &mut self,
         input: anki_proto::deck_config::GetRetentionWorkloadRequest,
     ) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
-        const LEARN_SPAN: usize = 100_000_000;
-        const TERMINATION_PROB: f32 = 0.001;
-        // the default values are from https://github.com/open-spaced-repetition/Anki-button-usage/blob/881009015c2a85ac911021d76d0aacb124849937/analysis.ipynb
-        const DEFAULT_LEARN_COST: f32 = 19.4698;
-        const DEFAULT_PASS_COST: f32 = 7.8454;
-        const DEFAULT_FAIL_COST: f32 = 23.185;
-        const DEFAULT_INITIAL_PASS_RATE: f32 = 0.7645;
-
+        let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
         let guard =
             self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
-        let costs = guard.col.storage.get_costs_for_retention()?;
-
-        fn smoothing(obs: f32, default: f32, count: u32) -> f32 {
-            let alpha = count as f32 / (50.0 + count as f32);
-            obs * alpha + default * (1.0 - alpha)
-        }
+        let revlogs = guard
+            .col
+            .storage
+            .get_revlog_entries_for_searched_cards_in_card_order()?;

-        let cost_success = smoothing(
-            costs.average_pass_time_ms / 1000.0,
-            DEFAULT_PASS_COST,
-            costs.pass_count,
-        );
-        let cost_failure = smoothing(
-            costs.average_fail_time_ms / 1000.0,
-            DEFAULT_FAIL_COST,
-            costs.fail_count,
-        );
-        let cost_learn = smoothing(
-            costs.average_learn_time_ms / 1000.0,
-            DEFAULT_LEARN_COST,
-            costs.learn_count,
-        );
-        let initial_pass_rate = smoothing(
-            costs.initial_pass_rate,
-            DEFAULT_INITIAL_PASS_RATE,
-            costs.pass_count,
-        );
+        let config = guard.col.get_optimal_retention_parameters(revlogs)?;
+        let cards = guard
+            .col
+            .storage
+            .all_searched_cards()?
+            .into_iter()
+            .filter(is_included_card)
+            .filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
+            .collect::<Vec<fsrs::Card>>();

-        let before = fsrs::expected_workload(
-            &input.w,
-            input.before,
-            LEARN_SPAN,
-            cost_success,
-            cost_failure,
-            cost_learn,
-            initial_pass_rate,
-            TERMINATION_PROB,
-        )?;
-        let after = fsrs::expected_workload(
-            &input.w,
-            input.after,
-            LEARN_SPAN,
-            cost_success,
-            cost_failure,
-            cost_learn,
-            initial_pass_rate,
-            TERMINATION_PROB,
-        )?;
+        let costs = (70u32..=99u32)
+            .into_par_iter()
+            .map(|dr| {
+                Ok((
+                    dr,
+                    fsrs::expected_workload_with_existing_cards(
+                        &input.w,
+                        dr as f32 / 100.,
+                        &config,
+                        &cards,
+                    )?,
+                ))
+            })
+            .collect::<Result<HashMap<_, _>>>()?;

-        Ok(anki_proto::deck_config::GetRetentionWorkloadResponse {
-            factor: after / before,
-        })
+        Ok(anki_proto::deck_config::GetRetentionWorkloadResponse { costs })
     }
 }
@@ -212,10 +212,13 @@ impl Collection {
         if fsrs_toggled {
             self.set_config_bool_inner(BoolKey::Fsrs, req.fsrs)?;
         }
+        let mut deck_desired_retention: HashMap<DeckId, f32> = Default::default();
         for deck in self.storage.get_all_decks()? {
             if let Ok(normal) = deck.normal() {
                 let deck_id = deck.id;

+                if let Some(desired_retention) = normal.desired_retention {
+                    deck_desired_retention.insert(deck_id, desired_retention);
+                }
                 // previous order & params
                 let previous_config_id = DeckConfigId(normal.config_id);
                 let previous_config = configs_before_update.get(&previous_config_id);
@@ -277,10 +280,11 @@ impl Collection {
             if req.fsrs {
                 Some(UpdateMemoryStateRequest {
                     params: c.fsrs_params().clone(),
-                    desired_retention: c.inner.desired_retention,
+                    preset_desired_retention: c.inner.desired_retention,
                     max_interval: c.inner.maximum_review_interval,
                     reschedule: req.fsrs_reschedule,
                     historical_retention: c.inner.historical_retention,
+                    deck_desired_retention: deck_desired_retention.clone(),
                 })
             } else {
                 None
@@ -409,6 +413,7 @@ fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits {
             .new_limit_today
             .map(|limit| limit.today == today)
             .unwrap_or_default(),
+        desired_retention: deck.desired_retention,
     }
 }

@@ -417,6 +422,7 @@ fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) {
     deck.new_limit = limits.new;
     update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
     update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
+    deck.desired_retention = limits.desired_retention;
 }

 fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {
@@ -31,6 +31,7 @@ pub(crate) use name::immediate_parent_name;
 pub use name::NativeDeckName;
 pub use schema11::DeckSchema11;

+use crate::deckconfig::DeckConfig;
 use crate::define_newtype;
 use crate::error::FilteredDeckError;
 use crate::markdown::render_markdown;
@@ -89,6 +90,16 @@ impl Deck {
         }
     }

+    /// Get the effective desired retention value for a deck.
+    /// Returns deck-specific desired retention if available, otherwise falls
+    /// back to config default.
+    pub fn effective_desired_retention(&self, config: &DeckConfig) -> f32 {
+        self.normal()
+            .ok()
+            .and_then(|d| d.desired_retention)
+            .unwrap_or(config.inner.desired_retention)
+    }
+
     // used by tests at the moment

     #[allow(dead_code)]
@@ -325,6 +325,7 @@ impl From<NormalDeckSchema11> for NormalDeck {
             new_limit: deck.new_limit,
             review_limit_today: deck.review_limit_today,
             new_limit_today: deck.new_limit_today,
+            desired_retention: None,
         }
     }
 }
@@ -84,6 +84,42 @@ impl RevlogEntry {
         })
         .unwrap()
     }
+
+    /// Returns true if this entry represents a reset operation.
+    /// These entries are created when a card is reset using
+    /// [`Collection::reschedule_cards_as_new`].
+    /// The 0 value of `ease_factor` differentiates it
+    /// from entry created by [`Collection::set_due_date`] that has
+    /// `RevlogReviewKind::Manual` but non-zero `ease_factor`.
+    pub(crate) fn is_reset(&self) -> bool {
+        self.review_kind == RevlogReviewKind::Manual && self.ease_factor == 0
+    }
+
+    /// Returns true if this entry represents a cramming operation.
+    /// These entries are created when a card is reviewed in a
+    /// filtered deck with "Reschedule cards based on my answers
+    /// in this deck" disabled.
+    /// [`crate::scheduler::answering::CardStateUpdater::apply_preview_state`].
+    /// The 0 value of `ease_factor` distinguishes it from the entry
+    /// created when a card is reviewed before its due date in a
+    /// filtered deck with reschedule enabled or using Grade Now.
+    pub(crate) fn is_cramming(&self) -> bool {
+        self.review_kind == RevlogReviewKind::Filtered && self.ease_factor == 0
+    }
+
+    pub(crate) fn has_rating(&self) -> bool {
+        self.button_chosen > 0
+    }
+
+    /// Returns true if the review entry is not manually rescheduled and not
+    /// cramming. Used to filter out entries that shouldn't be considered
+    /// for statistics and scheduling.
+    pub(crate) fn has_rating_and_affects_scheduling(&self) -> bool {
+        // not rescheduled/set due date/reset
+        self.has_rating()
+            // not cramming
+            && !self.is_cramming()
+    }
 }

 impl Collection {
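The helpers added above classify revlog entries purely from review_kind, ease_factor, and button_chosen, and the later hunks replace several hand-written filters with them. A self-contained sketch of the same predicates, using minimal stand-in types rather than the real RevlogEntry:

    #[derive(PartialEq, Clone, Copy)]
    enum ReviewKind {
        Learning,
        Review,
        Filtered,
        Manual,
        Rescheduled,
    }

    struct Entry {
        review_kind: ReviewKind,
        ease_factor: u32,
        button_chosen: u8,
    }

    impl Entry {
        // Manual entry with ease_factor == 0: created by a reset/"forget".
        fn is_reset(&self) -> bool {
            self.review_kind == ReviewKind::Manual && self.ease_factor == 0
        }
        // Filtered entry with ease_factor == 0: review in a non-rescheduling filtered deck.
        fn is_cramming(&self) -> bool {
            self.review_kind == ReviewKind::Filtered && self.ease_factor == 0
        }
        fn has_rating(&self) -> bool {
            self.button_chosen > 0
        }
        // Entries that should count for stats/scheduling: graded, and not cramming.
        fn affects_scheduling(&self) -> bool {
            self.has_rating() && !self.is_cramming()
        }
    }

    fn main() {
        let cram = Entry { review_kind: ReviewKind::Filtered, ease_factor: 0, button_chosen: 3 };
        let normal = Entry { review_kind: ReviewKind::Review, ease_factor: 2500, button_chosen: 3 };
        assert!(cram.is_cramming() && !cram.affects_scheduling());
        assert!(normal.affects_scheduling());
        println!("ok");
    }

Centralizing the predicates keeps the FSRS input filtering, last-review lookup, and graph code below consistent with each other.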
@@ -444,6 +444,8 @@ impl Collection {
             .get_deck(card.deck_id)?
             .or_not_found(card.deck_id)?;
         let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
+
+        let desired_retention = deck.effective_desired_retention(&config);
         let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
         let fsrs_next_states = if fsrs_enabled {
             let params = config.fsrs_params();
@@ -473,13 +475,13 @@ impl Collection {
             };
             Some(fsrs.next_states(
                 card.memory_state.map(Into::into),
-                config.inner.desired_retention,
+                desired_retention,
                 days_elapsed,
             )?)
         } else {
             None
         };
-        let desired_retention = fsrs_enabled.then_some(config.inner.desired_retention);
+        let desired_retention = fsrs_enabled.then_some(desired_retention);
         let fsrs_short_term_with_steps =
             self.get_config_bool(BoolKey::FsrsShortTermWithStepsEnabled);
         let fsrs_allow_short_term = if fsrs_enabled {
@@ -662,6 +664,43 @@ pub(crate) mod test {
         col.get_scheduling_states(card_id).unwrap().current
     }

+    // Test that deck-specific desired retention is used when available
+    #[test]
+    fn deck_specific_desired_retention() -> Result<()> {
+        let mut col = Collection::new();
+
+        // Enable FSRS
+        col.set_config_bool(BoolKey::Fsrs, true, false)?;
+
+        // Create a deck with specific desired retention
+        let deck_id = DeckId(1);
+        let deck = col.get_deck(deck_id)?.unwrap();
+        let mut deck_clone = (*deck).clone();
+        deck_clone.normal_mut().unwrap().desired_retention = Some(0.85);
+        col.update_deck(&mut deck_clone)?;
+
+        // Create a card in this deck
+        let nt = col.get_notetype_by_name("Basic")?.unwrap();
+        let mut note = nt.new_note();
+        col.add_note(&mut note, deck_id)?;
+
+        // Get the card using search_cards
+        let cards = col.search_cards(note.id, SortMode::NoOrder)?;
+        let card = col.storage.get_card(cards[0])?.unwrap();
+
+        // Test that the card state updater uses deck-specific desired retention
+        let updater = col.card_state_updater(card)?;
+
+        // Print debug information
+        println!("FSRS enabled: {}", col.get_config_bool(BoolKey::Fsrs));
+        println!("Desired retention: {:?}", updater.desired_retention);
+
+        // Verify that the desired retention is from the deck, not the config
+        assert_eq!(updater.desired_retention, Some(0.85));
+
+        Ok(())
+    }
+
     // make sure the 'current' state for a card matches the
     // state we applied to it
     #[test]
@@ -45,10 +45,11 @@ pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 {
 #[derive(Debug)]
 pub(crate) struct UpdateMemoryStateRequest {
     pub params: Params,
-    pub desired_retention: f32,
+    pub preset_desired_retention: f32,
     pub historical_retention: f32,
     pub max_interval: u32,
     pub reschedule: bool,
+    pub deck_desired_retention: HashMap<DeckId, f32>,
 }

 pub(crate) struct UpdateMemoryStateEntry {
@@ -98,7 +99,8 @@ impl Collection {
                     historical_retention.unwrap_or(0.9),
                     ignore_before,
                 )?;
-                let desired_retention = req.as_ref().map(|w| w.desired_retention);
+                let preset_desired_retention =
+                    req.as_ref().map(|w| w.preset_desired_retention).unwrap();
                 let mut progress = self.new_progress_handler::<ComputeMemoryProgress>();
                 progress.update(false, |s| s.total_cards = items.len() as u32)?;
                 for (idx, (card_id, item)) in items.into_iter().enumerate() {
@@ -109,7 +111,12 @@ impl Collection {
                     // Store decay and desired retention in the card so that add-ons, card info,
                     // stats and browser search/sorts don't need to access the deck config.
                     // Unlike memory states, scheduler doesn't use decay and dr stored in the card.
-                    card.desired_retention = desired_retention;
+                    let deck_id = card.original_or_current_deck_id();
+                    let desired_retention = *req
+                        .deck_desired_retention
+                        .get(&deck_id)
+                        .unwrap_or(&preset_desired_retention);
+                    card.desired_retention = Some(desired_retention);
                     card.decay = decay;
                     if let Some(item) = item {
                         card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?;
@@ -132,7 +139,7 @@ impl Collection {
                             let original_interval = card.interval;
                             let interval = fsrs.next_interval(
                                 Some(state.stability),
-                                desired_retention.unwrap(),
+                                desired_retention,
                                 0,
                             );
                             card.interval = rescheduler
@@ -205,7 +212,11 @@ impl Collection {
             .storage
             .get_deck_config(conf_id)?
             .or_not_found(conf_id)?;
-        let desired_retention = config.inner.desired_retention;
+
+        // Get deck-specific desired retention if available, otherwise use config
+        // default
+        let desired_retention = deck.effective_desired_retention(&config);
+
         let historical_retention = config.inner.historical_retention;
         let params = config.fsrs_params();
         let decay = get_decay_from_params(params);
@@ -295,15 +306,15 @@ pub(crate) fn fsrs_items_for_memory_states(
         .collect()
 }

-struct LastRevlogInfo {
+pub(crate) struct LastRevlogInfo {
     /// Used to determine the actual elapsed time between the last time the user
     /// reviewed the card and now, so that we can determine an accurate period
     /// when the card has subsequently been rescheduled to a different day.
-    last_reviewed_at: Option<TimestampSecs>,
+    pub(crate) last_reviewed_at: Option<TimestampSecs>,
 }

-/// Return a map of cards to info about last review/reschedule.
-fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
+/// Return a map of cards to info about last review.
+pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
     let mut out = HashMap::new();
     revlogs
         .iter()
@@ -312,8 +323,10 @@ fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
         .for_each(|(card_id, group)| {
             let mut last_reviewed_at = None;
             for e in group.into_iter() {
-                if e.button_chosen >= 1 {
+                if e.has_rating_and_affects_scheduling() {
                     last_reviewed_at = Some(e.id.as_secs());
+                } else if e.is_reset() {
+                    last_reviewed_at = None;
                 }
             }
             out.insert(card_id, LastRevlogInfo { last_reviewed_at });
|
@ -394,13 +394,13 @@ pub(crate) fn reviews_for_fsrs(
|
|||
let mut revlogs_complete = false;
|
||||
// Working backwards from the latest review...
|
||||
for (index, entry) in entries.iter().enumerate().rev() {
|
||||
if entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0 {
|
||||
if entry.is_cramming() {
|
||||
continue;
|
||||
}
|
||||
// For incomplete review histories, initial memory state is based on the first
|
||||
// user-graded review after the cutoff date with interval >= 1d.
|
||||
let within_cutoff = entry.id.0 > ignore_revlogs_before.0;
|
||||
let user_graded = matches!(entry.button_chosen, 1..=4);
|
||||
let user_graded = entry.has_rating();
|
||||
let interday = entry.interval >= 1 || entry.interval <= -86400;
|
||||
if user_graded && within_cutoff && interday {
|
||||
first_user_grade_idx = Some(index);
|
||||
|
@ -409,10 +409,7 @@ pub(crate) fn reviews_for_fsrs(
|
|||
if user_graded && entry.review_kind == RevlogReviewKind::Learning {
|
||||
first_of_last_learn_entries = Some(index);
|
||||
revlogs_complete = true;
|
||||
} else if matches!(
|
||||
(entry.review_kind, entry.ease_factor),
|
||||
(RevlogReviewKind::Manual, 0)
|
||||
) {
|
||||
} else if entry.is_reset() {
|
||||
// Ignore entries prior to a `Reset` if a learning step has come after,
|
||||
// but consider revlogs complete.
|
||||
if first_of_last_learn_entries.is_some() {
|
||||
|
@ -472,16 +469,7 @@ pub(crate) fn reviews_for_fsrs(
|
|||
}
|
||||
|
||||
// Filter out unwanted entries
|
||||
entries.retain(|entry| {
|
||||
!(
|
||||
// set due date, reset or rescheduled
|
||||
(entry.review_kind == RevlogReviewKind::Manual || entry.button_chosen == 0)
|
||||
|| // cram
|
||||
(entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0)
|
||||
|| // rescheduled
|
||||
(entry.review_kind == RevlogReviewKind::Rescheduled)
|
||||
)
|
||||
});
|
||||
entries.retain(|entry| entry.has_rating_and_affects_scheduling());
|
||||
|
||||
// Compute delta_t for each entry
|
||||
let delta_ts = iter::once(0)
|
||||
|
@ -560,10 +548,14 @@ pub(crate) mod tests {
|
|||
}
|
||||
|
||||
pub(crate) fn revlog(review_kind: RevlogReviewKind, days_ago: i64) -> RevlogEntry {
|
||||
let button_chosen = match review_kind {
|
||||
RevlogReviewKind::Manual | RevlogReviewKind::Rescheduled => 0,
|
||||
_ => 3,
|
||||
};
|
||||
RevlogEntry {
|
||||
review_kind,
|
||||
id: days_ago_ms(days_ago).into(),
|
||||
button_chosen: 3,
|
||||
button_chosen,
|
||||
interval: 1,
|
||||
..Default::default()
|
||||
}
|
||||
|
|
|
@@ -1,11 +1,13 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use std::collections::HashMap;
 use std::sync::Arc;

 use anki_proto::deck_config::deck_config::config::ReviewCardOrder;
 use anki_proto::deck_config::deck_config::config::ReviewCardOrder::*;
 use anki_proto::scheduler::SimulateFsrsReviewRequest;
 use anki_proto::scheduler::SimulateFsrsReviewResponse;
+use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
 use fsrs::simulate;
 use fsrs::PostSchedulingFn;
 use fsrs::ReviewPriorityFn;
@@ -14,6 +16,8 @@ use fsrs::FSRS;
 use itertools::Itertools;
 use rand::rngs::StdRng;
 use rand::Rng;
+use rayon::iter::IntoParallelIterator;
+use rayon::iter::ParallelIterator;

 use crate::card::CardQueue;
 use crate::card::CardType;
@@ -117,6 +121,12 @@ fn create_review_priority_fn(
     }
 }

+pub(crate) fn is_included_card(c: &Card) -> bool {
+    c.queue != CardQueue::Suspended
+        && c.queue != CardQueue::PreviewRepeat
+        && c.ctype != CardType::New
+}
+
 impl Collection {
     pub fn simulate_request_to_config(
         &mut self,
@@ -129,11 +139,6 @@ impl Collection {
             .get_revlog_entries_for_searched_cards_in_card_order()?;
         let mut cards = guard.col.storage.all_searched_cards()?;
         drop(guard);
-        fn is_included_card(c: &Card) -> bool {
-            c.queue != CardQueue::Suspended
-                && c.queue != CardQueue::PreviewRepeat
-                && c.ctype != CardType::New
-        }
         // calculate any missing memory state
         for c in &mut cards {
             if is_included_card(c) && c.memory_state.is_none() {
@@ -233,8 +238,8 @@ impl Collection {
             learning_step_transitions: p.learning_step_transitions,
             relearning_step_transitions: p.relearning_step_transitions,
             state_rating_costs: p.state_rating_costs,
-            learning_step_count: p.learning_step_count,
-            relearning_step_count: p.relearning_step_count,
+            learning_step_count: req.learning_step_count as usize,
+            relearning_step_count: req.relearning_step_count as usize,
         };

         Ok((config, converted_cards))
@@ -267,10 +272,46 @@ impl Collection {
             daily_time_cost: result.cost_per_day,
         })
     }
+
+    pub fn simulate_workload(
+        &mut self,
+        req: SimulateFsrsReviewRequest,
+    ) -> Result<SimulateFsrsWorkloadResponse> {
+        let (config, cards) = self.simulate_request_to_config(&req)?;
+        let dr_workload = (70u32..=99u32)
+            .into_par_iter()
+            .map(|dr| {
+                let result = simulate(
+                    &config,
+                    &req.params,
+                    dr as f32 / 100.,
+                    None,
+                    Some(cards.clone()),
+                )?;
+                Ok((
+                    dr,
+                    (
+                        *result.memorized_cnt_per_day.last().unwrap_or(&0.),
+                        result.cost_per_day.iter().sum::<f32>(),
+                        result.review_cnt_per_day.iter().sum::<usize>() as u32,
+                    ),
+                ))
+            })
+            .collect::<Result<HashMap<_, _>>>()?;
+        Ok(SimulateFsrsWorkloadResponse {
+            memorized: dr_workload.iter().map(|(k, v)| (*k, v.0)).collect(),
+            cost: dr_workload.iter().map(|(k, v)| (*k, v.1)).collect(),
+            review_count: dr_workload.iter().map(|(k, v)| (*k, v.2)).collect(),
+        })
+    }
 }

 impl Card {
-    fn convert(card: Card, days_elapsed: i32, memory_state: FsrsMemoryState) -> Option<fsrs::Card> {
+    pub(crate) fn convert(
+        card: Card,
+        days_elapsed: i32,
+        memory_state: FsrsMemoryState,
+    ) -> Option<fsrs::Card> {
         match card.queue {
             CardQueue::DayLearn | CardQueue::Review => {
                 let due = card.original_or_current_due();
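simulate_workload above runs one simulation per desired-retention value from 70% to 99% (in parallel via rayon) and then splits the per-DR totals into the three maps of SimulateFsrsWorkloadResponse, keyed by the integer percentage. A minimal sketch of that fan-out/aggregation shape, with a dummy cost model standing in for the real fsrs::simulate() call:

    use std::collections::HashMap;

    // Dummy stand-in for the simulator: returns (memorized, total_cost, review_count)
    // for a given desired retention. The real code calls fsrs::simulate() here.
    fn fake_simulate(desired_retention: f32) -> (f32, f32, u32) {
        let reviews = (desired_retention * 1000.0) as u32;
        (desired_retention * 500.0, reviews as f32 * 7.5, reviews)
    }

    fn main() {
        let per_dr: HashMap<u32, (f32, f32, u32)> = (70u32..=99)
            .map(|dr| (dr, fake_simulate(dr as f32 / 100.0)))
            .collect();

        // Split into the three maps the response carries.
        let memorized: HashMap<u32, f32> = per_dr.iter().map(|(k, v)| (*k, v.0)).collect();
        let cost: HashMap<u32, f32> = per_dr.iter().map(|(k, v)| (*k, v.1)).collect();
        let review_count: HashMap<u32, u32> = per_dr.iter().map(|(k, v)| (*k, v.2)).collect();

        println!("{} desired-retention values simulated", memorized.len());
        println!("cost at 90%: {:?}", cost.get(&90));
        assert_eq!(review_count.len(), 30);
    }

Returning one map per metric keyed by DR lets the frontend plot cost/memorized/review-count curves without re-running the simulation per point.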
@@ -16,6 +16,7 @@ use anki_proto::scheduler::FuzzDeltaResponse;
 use anki_proto::scheduler::GetOptimalRetentionParametersResponse;
 use anki_proto::scheduler::SimulateFsrsReviewRequest;
 use anki_proto::scheduler::SimulateFsrsReviewResponse;
+use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
 use fsrs::ComputeParametersInput;
 use fsrs::FSRSItem;
 use fsrs::FSRSReview;
@@ -283,6 +284,13 @@ impl crate::services::SchedulerService for Collection {
         self.simulate_review(input)
     }

+    fn simulate_fsrs_workload(
+        &mut self,
+        input: SimulateFsrsReviewRequest,
+    ) -> Result<SimulateFsrsWorkloadResponse> {
+        self.simulate_workload(input)
+    }
+
     fn compute_optimal_retention(
         &mut self,
         input: SimulateFsrsReviewRequest,
@@ -57,10 +57,10 @@ const SECOND: f32 = 1.0;
 const MINUTE: f32 = 60.0 * SECOND;
 const HOUR: f32 = 60.0 * MINUTE;
 const DAY: f32 = 24.0 * HOUR;
-const MONTH: f32 = 30.417 * DAY; // 365/12 ≈ 30.417
 const YEAR: f32 = 365.0 * DAY;
+const MONTH: f32 = YEAR / 12.0;

-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 pub(crate) enum TimespanUnit {
     Seconds,
     Minutes,
@@ -111,6 +111,13 @@ impl Timespan {
         }
     }

+    pub fn to_unit(self, unit: TimespanUnit) -> Timespan {
+        Timespan {
+            seconds: self.seconds,
+            unit,
+        }
+    }
+
     /// Round seconds and days to integers, otherwise
     /// truncates to one decimal place.
     pub fn as_rounded_unit(self) -> f32 {
@@ -378,9 +378,10 @@ fn card_order_from_sort_column(column: Column, timing: SchedTimingToday) -> Cow<
         Column::Stability => "extract_fsrs_variable(c.data, 's') asc".into(),
         Column::Difficulty => "extract_fsrs_variable(c.data, 'd') asc".into(),
         Column::Retrievability => format!(
-            "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}) asc",
+            "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}, {}) asc",
             timing.days_elapsed,
-            timing.next_day_at.0
+            timing.next_day_at.0,
+            timing.now.0,
         )
         .into(),
     }
@@ -418,13 +418,13 @@ impl SqlWriter<'_> {
                 write!(self.sql, "extract_fsrs_variable(c.data, 'd') {op} {d}").unwrap()
             }
             PropertyKind::Retrievability(r) => {
-                let (elap, next_day_at) = {
+                let (elap, next_day_at, now) = {
                     let timing = self.col.timing_today()?;
-                    (timing.days_elapsed, timing.next_day_at)
+                    (timing.days_elapsed, timing.next_day_at, timing.now)
                 };
                 write!(
                     self.sql,
-                    "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}) {op} {r}"
+                    "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}"
                 )
                 .unwrap()
             }
@@ -30,14 +30,24 @@ impl Collection {

         let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
         let timing = self.timing_today()?;
-        let seconds_elapsed = if let Some(last_review_time) = card.last_review_time {
-            timing.now.elapsed_secs_since(last_review_time) as u32
+
+        let last_review_time = if let Some(last_review_time) = card.last_review_time {
+            last_review_time
         } else {
-            self.storage
+            let mut new_card = card.clone();
+            let last_review_time = self
+                .storage
                 .time_of_last_review(card.id)?
-                .map(|ts| timing.now.elapsed_secs_since(ts))
-                .unwrap_or_default() as u32
+                .unwrap_or_default();
+
+            new_card.last_review_time = Some(last_review_time);
+
+            self.storage.update_card(&new_card)?;
+            last_review_time
         };

+        let seconds_elapsed = timing.now.elapsed_secs_since(last_review_time) as u32;
+
         let fsrs_retrievability = card
             .memory_state
             .zip(Some(seconds_elapsed))
@@ -187,7 +197,7 @@ impl Collection {
 }

 fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
-    let normal_answer_count = revlog.iter().filter(|r| r.button_chosen > 0).count();
+    let normal_answer_count = revlog.iter().filter(|r| r.has_rating()).count();
     let total_secs: f32 = revlog
         .iter()
         .map(|entry| (entry.taken_millis as f32) / 1000.0)
@@ -53,10 +53,7 @@ impl GraphsContext {
         self.revlog
             .iter()
             .filter(|review| {
-                // not rescheduled/set due date/reset
-                review.button_chosen > 0
-                // not cramming
-                && (review.review_kind != RevlogReviewKind::Filtered || review.ease_factor != 0)
+                review.has_rating_and_affects_scheduling()
                 // cards with an interval ≥ 1 day
                 && (review.review_kind == RevlogReviewKind::Review
                     || review.last_interval <= -86400
@@ -30,10 +30,10 @@ impl GraphsContext {
|
|||
.or_insert((0.0, 0));
|
||||
entry.1 += 1;
|
||||
if let Some(state) = card.memory_state {
|
||||
let elapsed_days = card.days_since_last_review(&timing).unwrap_or_default();
|
||||
let r = fsrs.current_retrievability(
|
||||
let elapsed_seconds = card.seconds_since_last_review(&timing).unwrap_or_default();
|
||||
let r = fsrs.current_retrievability_seconds(
|
||||
state.into(),
|
||||
elapsed_days,
|
||||
elapsed_seconds,
|
||||
card.decay.unwrap_or(FSRS5_DEFAULT_DECAY),
|
||||
);
|
||||
|
||||
|
|
|
@@ -5,17 +5,18 @@ use anki_i18n::I18n;
|
|||
|
||||
use crate::prelude::*;
|
||||
use crate::scheduler::timespan::Timespan;
|
||||
use crate::scheduler::timespan::TimespanUnit;
|
||||
|
||||
pub fn studied_today(cards: u32, secs: f32, tr: &I18n) -> String {
|
||||
let span = Timespan::from_secs(secs).natural_span();
|
||||
let amount = span.as_unit();
|
||||
let unit = span.unit().as_str();
|
||||
let unit = std::cmp::min(span.unit(), TimespanUnit::Minutes);
|
||||
let amount = span.to_unit(unit).as_unit();
|
||||
let secs_per_card = if cards > 0 {
|
||||
secs / (cards as f32)
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
tr.statistics_studied_today(unit, secs_per_card, amount, cards)
|
||||
tr.statistics_studied_today(unit.as_str(), secs_per_card, amount, cards)
|
||||
.into()
|
||||
}
|
||||
|
||||
|
@@ -41,5 +42,9 @@ mod test {
|
|||
&studied_today(3, 13.0, &tr).replace('\n', " "),
|
||||
"Studied 3 cards in 13 seconds today (4.33s/card)"
|
||||
);
|
||||
assert_eq!(
|
||||
&studied_today(300, 5400.0, &tr).replace('\n', " "),
|
||||
"Studied 300 cards in 90 minutes today (18s/card)"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,6 +14,8 @@ pub(crate) fn order_and_limit_for_search(
|
|||
) -> String {
|
||||
let temp_string;
|
||||
let today = timing.days_elapsed;
|
||||
let next_day_at = timing.next_day_at.0;
|
||||
let now = timing.now.0;
|
||||
let order = match term.order() {
|
||||
FilteredSearchOrder::OldestReviewedFirst => "(select max(id) from revlog where cid=c.id)",
|
||||
FilteredSearchOrder::Random => "random()",
|
||||
|
@@ -29,15 +31,13 @@ pub(crate) fn order_and_limit_for_search(
|
|||
&temp_string
|
||||
}
|
||||
FilteredSearchOrder::RetrievabilityAscending => {
|
||||
let next_day_at = timing.next_day_at.0;
|
||||
temp_string =
|
||||
build_retrievability_query(fsrs, today, next_day_at, SqlSortOrder::Ascending);
|
||||
build_retrievability_query(fsrs, today, next_day_at, now, SqlSortOrder::Ascending);
|
||||
&temp_string
|
||||
}
|
||||
FilteredSearchOrder::RetrievabilityDescending => {
|
||||
let next_day_at = timing.next_day_at.0;
|
||||
temp_string =
|
||||
build_retrievability_query(fsrs, today, next_day_at, SqlSortOrder::Descending);
|
||||
build_retrievability_query(fsrs, today, next_day_at, now, SqlSortOrder::Descending);
|
||||
&temp_string
|
||||
}
|
||||
};
|
||||
|
@@ -49,11 +49,12 @@ fn build_retrievability_query(
|
|||
fsrs: bool,
|
||||
today: u32,
|
||||
next_day_at: i64,
|
||||
now: i64,
|
||||
order: SqlSortOrder,
|
||||
) -> String {
|
||||
if fsrs {
|
||||
format!(
|
||||
"extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}) {order}"
|
||||
"extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}, {now}) {order}"
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
|
|
|
@@ -1,85 +0,0 @@
|
|||
WITH searched_revlogs AS (
|
||||
SELECT *,
|
||||
RANK() OVER (
|
||||
PARTITION BY cid
|
||||
ORDER BY id ASC
|
||||
) AS rank_num
|
||||
FROM revlog
|
||||
WHERE ease > 0
|
||||
AND cid IN search_cids
|
||||
ORDER BY id DESC -- Use the last 10_000 reviews
|
||||
LIMIT 10000
|
||||
), average_pass AS (
|
||||
SELECT AVG(time)
|
||||
FROM searched_revlogs
|
||||
WHERE ease > 1
|
||||
AND type = 1
|
||||
),
|
||||
lapse_count AS (
|
||||
SELECT COUNT(time) AS lapse_count
|
||||
FROM searched_revlogs
|
||||
WHERE ease = 1
|
||||
AND type = 1
|
||||
),
|
||||
fail_sum AS (
|
||||
SELECT SUM(time) AS total_fail_time
|
||||
FROM searched_revlogs
|
||||
WHERE (
|
||||
ease = 1
|
||||
AND type = 1
|
||||
)
|
||||
OR type = 2
|
||||
),
|
||||
-- (sum(Relearning) + sum(Lapses)) / count(Lapses)
|
||||
average_fail AS (
|
||||
SELECT total_fail_time * 1.0 / NULLIF(lapse_count, 0) AS avg_fail_time
|
||||
FROM fail_sum,
|
||||
lapse_count
|
||||
),
|
||||
-- Can lead to cards with partial learn histories skewing the time
|
||||
summed_learns AS (
|
||||
SELECT cid,
|
||||
SUM(time) AS total_time
|
||||
FROM searched_revlogs
|
||||
WHERE searched_revlogs.type = 0
|
||||
GROUP BY cid
|
||||
),
|
||||
average_learn AS (
|
||||
SELECT AVG(total_time) AS avg_learn_time
|
||||
FROM summed_learns
|
||||
),
|
||||
initial_pass_rate AS (
|
||||
SELECT AVG(
|
||||
CASE
|
||||
WHEN ease > 1 THEN 1.0
|
||||
ELSE 0.0
|
||||
END
|
||||
) AS initial_pass_rate
|
||||
FROM searched_revlogs
|
||||
WHERE rank_num = 1
|
||||
),
|
||||
pass_cnt AS (
|
||||
SELECT COUNT(*) AS cnt
|
||||
FROM searched_revlogs
|
||||
WHERE ease > 1
|
||||
AND type = 1
|
||||
),
|
||||
fail_cnt AS (
|
||||
SELECT COUNT(*) AS cnt
|
||||
FROM searched_revlogs
|
||||
WHERE ease = 1
|
||||
AND type = 1
|
||||
),
|
||||
learn_cnt AS (
|
||||
SELECT COUNT(*) AS cnt
|
||||
FROM searched_revlogs
|
||||
WHERE type = 0
|
||||
)
|
||||
SELECT *
|
||||
FROM average_pass,
|
||||
average_fail,
|
||||
average_learn,
|
||||
initial_pass_rate,
|
||||
pass_cnt,
|
||||
fail_cnt,
|
||||
learn_cnt;
|
|
@@ -33,6 +33,7 @@ use crate::decks::DeckKind;
|
|||
use crate::error::Result;
|
||||
use crate::notes::NoteId;
|
||||
use crate::scheduler::congrats::CongratsInfo;
|
||||
use crate::scheduler::fsrs::memory_state::get_last_revlog_info;
|
||||
use crate::scheduler::queue::BuryMode;
|
||||
use crate::scheduler::queue::DueCard;
|
||||
use crate::scheduler::queue::DueCardKind;
|
||||
|
@@ -42,15 +43,11 @@ use crate::timestamp::TimestampMillis;
|
|||
use crate::timestamp::TimestampSecs;
|
||||
use crate::types::Usn;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct RetentionCosts {
|
||||
pub average_pass_time_ms: f32,
|
||||
pub average_fail_time_ms: f32,
|
||||
pub average_learn_time_ms: f32,
|
||||
pub initial_pass_rate: f32,
|
||||
pub pass_count: u32,
|
||||
pub fail_count: u32,
|
||||
pub learn_count: u32,
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) struct CardFixStats {
|
||||
pub new_cards_fixed: usize,
|
||||
pub other_cards_fixed: usize,
|
||||
pub last_review_time_fixed: usize,
|
||||
}
|
||||
|
||||
impl FromSql for CardType {
|
||||
|
@@ -376,7 +373,7 @@ impl super::SqliteStorage {
|
|||
mtime: TimestampSecs,
|
||||
usn: Usn,
|
||||
v1_sched: bool,
|
||||
) -> Result<(usize, usize)> {
|
||||
) -> Result<CardFixStats> {
|
||||
let new_cnt = self
|
||||
.db
|
||||
.prepare(include_str!("fix_due_new.sql"))?
|
||||
|
@@ -401,7 +398,24 @@ impl super::SqliteStorage {
|
|||
.db
|
||||
.prepare(include_str!("fix_ordinal.sql"))?
|
||||
.execute(params![mtime, usn])?;
|
||||
Ok((new_cnt, other_cnt))
|
||||
let mut last_review_time_cnt = 0;
|
||||
let revlog = self.get_all_revlog_entries_in_card_order()?;
|
||||
let last_revlog_info = get_last_revlog_info(&revlog);
|
||||
for (card_id, last_revlog_info) in last_revlog_info {
|
||||
let card = self.get_card(card_id)?;
|
||||
if let Some(mut card) = card {
|
||||
if card.ctype != CardType::New && card.last_review_time.is_none() {
|
||||
card.last_review_time = last_revlog_info.last_reviewed_at;
|
||||
self.update_card(&card)?;
|
||||
last_review_time_cnt += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(CardFixStats {
|
||||
new_cards_fixed: new_cnt,
|
||||
other_cards_fixed: other_cnt,
|
||||
last_review_time_fixed: last_review_time_cnt,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn delete_orphaned_cards(&self) -> Result<usize> {
|
||||
|
@@ -759,24 +773,6 @@ impl super::SqliteStorage {
|
|||
.get(0)?)
|
||||
}
|
||||
|
||||
pub(crate) fn get_costs_for_retention(&self) -> Result<RetentionCosts> {
|
||||
let mut statement = self
|
||||
.db
|
||||
.prepare(include_str!("get_costs_for_retention.sql"))?;
|
||||
let mut query = statement.query(params![])?;
|
||||
let row = query.next()?.unwrap();
|
||||
|
||||
Ok(RetentionCosts {
|
||||
average_pass_time_ms: row.get(0).unwrap_or(7000.),
|
||||
average_fail_time_ms: row.get(1).unwrap_or(23_000.),
|
||||
average_learn_time_ms: row.get(2).unwrap_or(30_000.),
|
||||
initial_pass_rate: row.get(3).unwrap_or(0.5),
|
||||
pass_count: row.get(4).unwrap_or(0),
|
||||
fail_count: row.get(5).unwrap_or(0),
|
||||
learn_count: row.get(6).unwrap_or(0),
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn get_all_cards(&self) -> Vec<Card> {
|
||||
self.db
|
||||
|
@@ -837,8 +833,9 @@ impl fmt::Display for ReviewOrderSubclause {
|
|||
ReviewOrderSubclause::RetrievabilityFsrs { timing, order } => {
|
||||
let today = timing.days_elapsed;
|
||||
let next_day_at = timing.next_day_at.0;
|
||||
let now = timing.now.0;
|
||||
temp_string =
|
||||
format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}) {order}");
|
||||
format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}, {now}) {order}");
|
||||
&temp_string
|
||||
}
|
||||
ReviewOrderSubclause::Added => "nid asc, ord asc",
|
||||
|
|
|
@@ -310,14 +310,14 @@ fn add_extract_fsrs_variable(db: &Connection) -> rusqlite::Result<()> {
|
|||
}
|
||||
|
||||
/// eg. extract_fsrs_retrievability(card.data, card.due, card.ivl,
|
||||
/// timing.days_elapsed, timing.next_day_at) -> float | null
|
||||
/// timing.days_elapsed, timing.next_day_at, timing.now) -> float | null
|
||||
fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
|
||||
db.create_scalar_function(
|
||||
"extract_fsrs_retrievability",
|
||||
5,
|
||||
6,
|
||||
FunctionFlags::SQLITE_DETERMINISTIC,
|
||||
move |ctx| {
|
||||
assert_eq!(ctx.len(), 5, "called with unexpected number of arguments");
|
||||
assert_eq!(ctx.len(), 6, "called with unexpected number of arguments");
|
||||
let Ok(card_data) = ctx.get_raw(0).as_str() else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
@@ -328,18 +328,18 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
|
|||
let Ok(due) = ctx.get_raw(1).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let days_elapsed = if let Some(last_review_time) = card_data.last_review_time {
|
||||
// Use last_review_time to calculate days_elapsed
|
||||
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
(next_day_at as u32).saturating_sub(last_review_time.0 as u32) / 86_400
|
||||
let Ok(now) = ctx.get_raw(5).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let seconds_elapsed = if let Some(last_review_time) = card_data.last_review_time {
|
||||
now.saturating_sub(last_review_time.0) as u32
|
||||
} else if due > 365_000 {
|
||||
// (re)learning card in seconds
|
||||
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
|
||||
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
(next_day_at as u32).saturating_sub(due as u32) / 86_400
|
||||
let last_review_time = due.saturating_sub(ivl);
|
||||
now.saturating_sub(last_review_time) as u32
|
||||
} else {
|
||||
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
|
||||
return Ok(None);
|
||||
|
@@ -348,29 +348,32 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
|
|||
return Ok(None);
|
||||
};
|
||||
let review_day = due.saturating_sub(ivl);
|
||||
(days_elapsed as u32).saturating_sub(review_day as u32)
|
||||
days_elapsed.saturating_sub(review_day) as u32 * 86_400
|
||||
};
|
||||
let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
|
||||
Ok(card_data.memory_state().map(|state| {
|
||||
FSRS::new(None)
|
||||
.unwrap()
|
||||
.current_retrievability(state.into(), days_elapsed, decay)
|
||||
}))
|
||||
let retrievability = card_data.memory_state().map(|state| {
|
||||
FSRS::new(None).unwrap().current_retrievability_seconds(
|
||||
state.into(),
|
||||
seconds_elapsed,
|
||||
decay,
|
||||
)
|
||||
});
|
||||
Ok(retrievability)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// eg. extract_fsrs_relative_retrievability(card.data, card.due,
|
||||
/// timing.days_elapsed, card.ivl, timing.next_day_at) -> float | null. The
|
||||
/// higher the number, the higher the card's retrievability relative to the
|
||||
/// configured desired retention.
|
||||
/// timing.days_elapsed, card.ivl, timing.next_day_at, timing.now) -> float |
|
||||
/// null. The higher the number, the higher the card's retrievability relative
|
||||
/// to the configured desired retention.
|
||||
fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result<()> {
|
||||
db.create_scalar_function(
|
||||
"extract_fsrs_relative_retrievability",
|
||||
5,
|
||||
6,
|
||||
FunctionFlags::SQLITE_DETERMINISTIC,
|
||||
move |ctx| {
|
||||
assert_eq!(ctx.len(), 5, "called with unexpected number of arguments");
|
||||
assert_eq!(ctx.len(), 6, "called with unexpected number of arguments");
|
||||
|
||||
let Ok(due) = ctx.get_raw(1).as_i64() else {
|
||||
return Ok(None);
|
||||
|
@@ -381,6 +384,9 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
|
|||
let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(now) = ctx.get_raw(5).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let days_elapsed = if due > 365_000 {
|
||||
// (re)learning
|
||||
(next_day_at as u32).saturating_sub(due as u32) / 86_400
|
||||
|
@@ -402,17 +408,30 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
|
|||
desired_retrievability = desired_retrievability.max(0.0001);
|
||||
let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
|
||||
|
||||
let days_elapsed = if let Some(last_review_time) =
|
||||
card_data.last_review_time
|
||||
{
|
||||
TimestampSecs(next_day_at).elapsed_days_since(last_review_time) as u32
|
||||
} else {
|
||||
days_elapsed
|
||||
};
|
||||
let seconds_elapsed =
|
||||
if let Some(last_review_time) = card_data.last_review_time {
|
||||
now.saturating_sub(last_review_time.0) as u32
|
||||
} else if due > 365_000 {
|
||||
// (re)learning card in seconds
|
||||
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let last_review_time = due.saturating_sub(ivl);
|
||||
now.saturating_sub(last_review_time) as u32
|
||||
} else {
|
||||
let Ok(ivl) = ctx.get_raw(2).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(days_elapsed) = ctx.get_raw(3).as_i64() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let review_day = due.saturating_sub(ivl);
|
||||
days_elapsed.saturating_sub(review_day) as u32 * 86_400
|
||||
};
|
||||
|
||||
let current_retrievability = FSRS::new(None)
|
||||
.unwrap()
|
||||
.current_retrievability(state.into(), days_elapsed, decay)
|
||||
.current_retrievability_seconds(state.into(), seconds_elapsed, decay)
|
||||
.max(0.0001);
|
||||
|
||||
return Ok(Some(
|
||||
|
|
|
@@ -93,6 +93,10 @@ impl TimestampMillis {
|
|||
pub fn adding_secs(self, secs: i64) -> Self {
|
||||
Self(self.0 + secs * 1000)
|
||||
}
|
||||
|
||||
pub fn elapsed_millis(self) -> u64 {
|
||||
(Self::now().0 - self.0).max(0) as u64
|
||||
}
|
||||
}
|
||||
|
||||
fn elapsed() -> time::Duration {
|
||||
|
|
|
@@ -12,7 +12,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
export { className as class };
|
||||
|
||||
export let title: string;
|
||||
export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
|
||||
export let onHelpClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
|
||||
</script>
|
||||
|
||||
<div
|
||||
|
@@ -25,25 +25,21 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
style:--container-margin="0"
|
||||
>
|
||||
<div class="position-relative">
|
||||
{#if onTitleClick}
|
||||
<span
|
||||
on:click={onTitleClick}
|
||||
on:keydown={onTitleClick}
|
||||
<h1>
|
||||
{title}
|
||||
</h1>
|
||||
{#if onHelpClick}
|
||||
<div
|
||||
on:click={onHelpClick}
|
||||
on:keydown={onHelpClick}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
class="help-badge position-absolute"
|
||||
class:rtl
|
||||
>
|
||||
<h1>
|
||||
{title}
|
||||
</h1>
|
||||
</span>
|
||||
{:else}
|
||||
<h1>
|
||||
{title}
|
||||
</h1>
|
||||
<slot name="tooltip" />
|
||||
</div>
|
||||
{/if}
|
||||
<div class="help-badge position-absolute" class:rtl>
|
||||
<slot name="tooltip" />
|
||||
</div>
|
||||
</div>
|
||||
<slot />
|
||||
</div>
|
||||
|
|
|
@@ -89,7 +89,7 @@ export function naturalWholeUnit(secs: number): TimespanUnit {
|
|||
}
|
||||
|
||||
export function studiedToday(cards: number, secs: number): string {
|
||||
const unit = naturalUnit(secs);
|
||||
const unit = Math.min(naturalUnit(secs), TimespanUnit.Minutes);
|
||||
const amount = unitAmount(unit, secs);
|
||||
const name = unitName(unit);
|
||||
|
||||
|
|
|
@@ -21,7 +21,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import SwitchRow from "$lib/components/SwitchRow.svelte";
|
||||
|
||||
import GlobalLabel from "./GlobalLabel.svelte";
|
||||
import { commitEditing, fsrsParams, type DeckOptionsState } from "./lib";
|
||||
import { commitEditing, fsrsParams, type DeckOptionsState, ValueTab } from "./lib";
|
||||
import SpinBoxFloatRow from "./SpinBoxFloatRow.svelte";
|
||||
import Warning from "./Warning.svelte";
|
||||
import ParamsInputRow from "./ParamsInputRow.svelte";
|
||||
|
@@ -29,9 +29,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import SimulatorModal from "./SimulatorModal.svelte";
|
||||
import {
|
||||
GetRetentionWorkloadRequest,
|
||||
type GetRetentionWorkloadResponse,
|
||||
UpdateDeckConfigsMode,
|
||||
} from "@generated/anki/deck_config_pb";
|
||||
import type Modal from "bootstrap/js/dist/modal";
|
||||
import TabbedValue from "./TabbedValue.svelte";
|
||||
import Item from "$lib/components/Item.svelte";
|
||||
import DynamicallySlottable from "$lib/components/DynamicallySlottable.svelte";
|
||||
|
||||
export let state: DeckOptionsState;
|
||||
export let openHelpModal: (String) => void;
|
||||
|
@@ -42,13 +46,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
const defaults = state.defaults;
|
||||
const fsrsReschedule = state.fsrsReschedule;
|
||||
const daysSinceLastOptimization = state.daysSinceLastOptimization;
|
||||
const limits = state.deckLimits;
|
||||
|
||||
$: lastOptimizationWarning =
|
||||
$daysSinceLastOptimization > 30 ? tr.deckConfigTimeToOptimize() : "";
|
||||
let desiredRetentionFocused = false;
|
||||
let desiredRetentionEverFocused = false;
|
||||
let optimized = false;
|
||||
const startingDesiredRetention = $config.desiredRetention.toFixed(2);
|
||||
$: if (desiredRetentionFocused) {
|
||||
desiredRetentionEverFocused = true;
|
||||
}
|
||||
|
@@ -63,28 +67,41 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
$: computing = computingParams || checkingParams;
|
||||
$: defaultparamSearch = `preset:"${state.getCurrentNameForSearch()}" -is:suspended`;
|
||||
$: roundedRetention = Number($config.desiredRetention.toFixed(2));
|
||||
$: roundedRetention = Number(effectiveDesiredRetention.toFixed(2));
|
||||
$: desiredRetentionWarning = getRetentionLongShortWarning(roundedRetention);
|
||||
|
||||
let timeoutId: ReturnType<typeof setTimeout> | undefined = undefined;
|
||||
const WORKLOAD_UPDATE_DELAY_MS = 100;
|
||||
|
||||
let desiredRetentionChangeInfo = "";
|
||||
$: {
|
||||
clearTimeout(timeoutId);
|
||||
if (showDesiredRetentionTooltip) {
|
||||
timeoutId = setTimeout(() => {
|
||||
getRetentionChangeInfo(roundedRetention, fsrsParams($config));
|
||||
}, WORKLOAD_UPDATE_DELAY_MS);
|
||||
} else {
|
||||
desiredRetentionChangeInfo = "";
|
||||
}
|
||||
$: if (showDesiredRetentionTooltip) {
|
||||
getRetentionChangeInfo(roundedRetention, fsrsParams($config));
|
||||
}
|
||||
|
||||
$: retentionWarningClass = getRetentionWarningClass(roundedRetention);
|
||||
|
||||
$: newCardsIgnoreReviewLimit = state.newCardsIgnoreReviewLimit;
|
||||
|
||||
// Create tabs for desired retention
|
||||
const desiredRetentionTabs: ValueTab[] = [
|
||||
new ValueTab(
|
||||
tr.deckConfigSharedPreset(),
|
||||
$config.desiredRetention,
|
||||
(value) => ($config.desiredRetention = value!),
|
||||
$config.desiredRetention,
|
||||
null,
|
||||
),
|
||||
new ValueTab(
|
||||
tr.deckConfigDeckOnly(),
|
||||
$limits.desiredRetention ?? null,
|
||||
(value) => ($limits.desiredRetention = value ?? undefined),
|
||||
null,
|
||||
null,
|
||||
),
|
||||
];
|
||||
|
||||
// Get the effective desired retention value (deck-specific if set, otherwise config default)
|
||||
let effectiveDesiredRetention =
|
||||
$limits.desiredRetention ?? $config.desiredRetention;
|
||||
const startingDesiredRetention = effectiveDesiredRetention.toFixed(2);
|
||||
|
||||
$: simulateFsrsRequest = new SimulateFsrsReviewRequest({
|
||||
params: fsrsParams($config),
|
||||
desiredRetention: $config.desiredRetention,
|
||||
|
@@ -96,6 +113,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
easyDaysPercentages: $config.easyDaysPercentages,
|
||||
reviewOrder: $config.reviewOrder,
|
||||
historicalRetention: $config.historicalRetention,
|
||||
learningStepCount: $config.learnSteps.length,
|
||||
relearningStepCount: $config.relearnSteps.length,
|
||||
});
|
||||
|
||||
const DESIRED_RETENTION_LOW_THRESHOLD = 0.8;
|
||||
|
@@ -111,21 +130,37 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
}
|
||||
|
||||
let retentionWorkloadInfo: undefined | Promise<GetRetentionWorkloadResponse> =
|
||||
undefined;
|
||||
let lastParams = [...fsrsParams($config)];
|
||||
|
||||
async function getRetentionChangeInfo(retention: number, params: number[]) {
|
||||
if (+startingDesiredRetention == roundedRetention) {
|
||||
desiredRetentionChangeInfo = tr.deckConfigWorkloadFactorUnchanged();
|
||||
return;
|
||||
}
|
||||
const request = new GetRetentionWorkloadRequest({
|
||||
w: params,
|
||||
search: defaultparamSearch,
|
||||
before: +startingDesiredRetention,
|
||||
after: retention,
|
||||
});
|
||||
const resp = await getRetentionWorkload(request);
|
||||
if (
|
||||
// If the cache is empty and a request has not yet been made to fill it
|
||||
!retentionWorkloadInfo ||
|
||||
// If the parameters have been changed
|
||||
lastParams.toString() !== params.toString()
|
||||
) {
|
||||
const request = new GetRetentionWorkloadRequest({
|
||||
w: params,
|
||||
search: defaultparamSearch,
|
||||
});
|
||||
lastParams = [...params];
|
||||
retentionWorkloadInfo = getRetentionWorkload(request);
|
||||
}
|
||||
|
||||
const previous = +startingDesiredRetention * 100;
|
||||
const after = retention * 100;
|
||||
const resp = await retentionWorkloadInfo;
|
||||
const factor = resp.costs[after] / resp.costs[previous];
|
||||
|
||||
desiredRetentionChangeInfo = tr.deckConfigWorkloadFactorChange({
|
||||
factor: resp.factor.toFixed(2),
|
||||
previousDr: (+startingDesiredRetention * 100).toString(),
|
||||
factor: factor.toFixed(2),
|
||||
previousDr: previous.toString(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -185,29 +220,34 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
healthCheck: $healthCheck,
|
||||
});
|
||||
|
||||
const already_optimal =
|
||||
const alreadyOptimal =
|
||||
(params.length &&
|
||||
params.every(
|
||||
(n, i) => n.toFixed(4) === resp.params[i].toFixed(4),
|
||||
)) ||
|
||||
resp.params.length === 0;
|
||||
|
||||
let healthCheckMessage = "";
|
||||
if (resp.healthCheckPassed !== undefined) {
|
||||
if (resp.healthCheckPassed) {
|
||||
setTimeout(() => alert(tr.deckConfigFsrsGoodFit()), 200);
|
||||
} else {
|
||||
setTimeout(
|
||||
() => alert(tr.deckConfigFsrsBadFitWarning()),
|
||||
200,
|
||||
);
|
||||
}
|
||||
} else if (already_optimal) {
|
||||
const msg = resp.fsrsItems
|
||||
healthCheckMessage = resp.healthCheckPassed
|
||||
? tr.deckConfigFsrsGoodFit()
|
||||
: tr.deckConfigFsrsBadFitWarning();
|
||||
}
|
||||
let alreadyOptimalMessage = "";
|
||||
if (alreadyOptimal) {
|
||||
alreadyOptimalMessage = resp.fsrsItems
|
||||
? tr.deckConfigFsrsParamsOptimal()
|
||||
: tr.deckConfigFsrsParamsNoReviews();
|
||||
setTimeout(() => alert(msg), 200);
|
||||
}
|
||||
if (!already_optimal) {
|
||||
const message = [alreadyOptimalMessage, healthCheckMessage]
|
||||
.filter((a) => a)
|
||||
.join("\n\n");
|
||||
|
||||
if (message) {
|
||||
setTimeout(() => alert(message), 200);
|
||||
}
|
||||
|
||||
if (!alreadyOptimal) {
|
||||
$config.fsrsParams6 = resp.params;
|
||||
setTimeout(() => {
|
||||
optimized = true;
|
||||
|
@@ -299,20 +339,40 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
|
||||
let simulatorModal: Modal;
|
||||
let workloadModal: Modal;
|
||||
</script>
|
||||
|
||||
<SpinBoxFloatRow
|
||||
bind:value={$config.desiredRetention}
|
||||
defaultValue={defaults.desiredRetention}
|
||||
min={0.7}
|
||||
max={0.99}
|
||||
percentage={true}
|
||||
bind:focused={desiredRetentionFocused}
|
||||
<DynamicallySlottable slotHost={Item} api={{}}>
|
||||
<Item>
|
||||
<SpinBoxFloatRow
|
||||
bind:value={effectiveDesiredRetention}
|
||||
defaultValue={defaults.desiredRetention}
|
||||
min={0.7}
|
||||
max={0.99}
|
||||
percentage={true}
|
||||
bind:focused={desiredRetentionFocused}
|
||||
>
|
||||
<TabbedValue
|
||||
slot="tabs"
|
||||
tabs={desiredRetentionTabs}
|
||||
bind:value={effectiveDesiredRetention}
|
||||
/>
|
||||
<SettingTitle on:click={() => openHelpModal("desiredRetention")}>
|
||||
{tr.deckConfigDesiredRetention()}
|
||||
</SettingTitle>
|
||||
</SpinBoxFloatRow>
|
||||
</Item>
|
||||
</DynamicallySlottable>
|
||||
|
||||
<button
|
||||
class="btn btn-primary"
|
||||
on:click={() => {
|
||||
simulateFsrsRequest.reviewLimit = 9999;
|
||||
workloadModal?.show();
|
||||
}}
|
||||
>
|
||||
<SettingTitle on:click={() => openHelpModal("desiredRetention")}>
|
||||
{tr.deckConfigDesiredRetention()}
|
||||
</SettingTitle>
|
||||
</SpinBoxFloatRow>
|
||||
{tr.deckConfigFsrsDesiredRetentionHelpMeDecideExperimental()}
|
||||
</button>
|
||||
|
||||
<Warning warning={desiredRetentionChangeInfo} className={"alert-info two-line"} />
|
||||
<Warning warning={desiredRetentionWarning} className={retentionWarningClass} />
|
||||
|
@@ -409,6 +469,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
{onPresetChange}
|
||||
/>
|
||||
|
||||
<SimulatorModal
|
||||
bind:modal={workloadModal}
|
||||
workload
|
||||
{state}
|
||||
{simulateFsrsRequest}
|
||||
{computing}
|
||||
{openHelpModal}
|
||||
{onPresetChange}
|
||||
/>
|
||||
|
||||
<style>
|
||||
.btn {
|
||||
margin-bottom: 0.375rem;
|
||||
|
|
|
@@ -13,15 +13,25 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import TableData from "../graphs/TableData.svelte";
|
||||
import InputBox from "../graphs/InputBox.svelte";
|
||||
import { defaultGraphBounds, type TableDatum } from "../graphs/graph-helpers";
|
||||
import { SimulateSubgraph, type Point } from "../graphs/simulator";
|
||||
import {
|
||||
SimulateSubgraph,
|
||||
SimulateWorkloadSubgraph,
|
||||
type Point,
|
||||
type WorkloadPoint,
|
||||
} from "../graphs/simulator";
|
||||
import * as tr from "@generated/ftl";
|
||||
import { renderSimulationChart } from "../graphs/simulator";
|
||||
import { computeOptimalRetention, simulateFsrsReview } from "@generated/backend";
|
||||
import { renderSimulationChart, renderWorkloadChart } from "../graphs/simulator";
|
||||
import {
|
||||
computeOptimalRetention,
|
||||
simulateFsrsReview,
|
||||
simulateFsrsWorkload,
|
||||
} from "@generated/backend";
|
||||
import { runWithBackendProgress } from "@tslib/progress";
|
||||
import type {
|
||||
ComputeOptimalRetentionResponse,
|
||||
SimulateFsrsReviewRequest,
|
||||
SimulateFsrsReviewResponse,
|
||||
SimulateFsrsWorkloadResponse,
|
||||
} from "@generated/anki/scheduler_pb";
|
||||
import type { DeckOptionsState } from "./lib";
|
||||
import SwitchRow from "$lib/components/SwitchRow.svelte";
|
||||
|
@@ -40,9 +50,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
export let computing: boolean;
|
||||
export let openHelpModal: (key: string) => void;
|
||||
export let onPresetChange: () => void;
|
||||
/** Do not modify this once set */
|
||||
export let workload: boolean = false;
|
||||
|
||||
const config = state.currentConfig;
|
||||
let simulateSubgraph: SimulateSubgraph = SimulateSubgraph.count;
|
||||
let simulateWorkloadSubgraph: SimulateWorkloadSubgraph =
|
||||
SimulateWorkloadSubgraph.ratio;
|
||||
let tableData: TableDatum[] = [];
|
||||
let simulating: boolean = false;
|
||||
const fsrs = state.fsrs;
|
||||
|
@@ -50,7 +64,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
let svg: HTMLElement | SVGElement | null = null;
|
||||
let simulationNumber = 0;
|
||||
let points: Point[] = [];
|
||||
let points: (WorkloadPoint | Point)[] = [];
|
||||
const newCardsIgnoreReviewLimit = state.newCardsIgnoreReviewLimit;
|
||||
let smooth = true;
|
||||
let suspendLeeches = $config.leechAction == DeckConfig_Config_LeechAction.SUSPEND;
|
||||
|
@@ -177,6 +191,43 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
}
|
||||
|
||||
async function simulateWorkload(): Promise<void> {
|
||||
let resp: SimulateFsrsWorkloadResponse | undefined;
|
||||
updateRequest();
|
||||
try {
|
||||
await runWithBackendProgress(
|
||||
async () => {
|
||||
simulating = true;
|
||||
resp = await simulateFsrsWorkload(simulateFsrsRequest);
|
||||
},
|
||||
() => {},
|
||||
);
|
||||
} finally {
|
||||
simulating = false;
|
||||
if (resp) {
|
||||
simulationNumber += 1;
|
||||
|
||||
points = points.concat(
|
||||
Object.entries(resp.memorized).map(([dr, v]) => ({
|
||||
x: parseInt(dr),
|
||||
timeCost: resp!.cost[dr],
|
||||
memorized: v,
|
||||
count: resp!.reviewCount[dr],
|
||||
label: simulationNumber,
|
||||
learnSpan: simulateFsrsRequest.daysToSimulate,
|
||||
})),
|
||||
);
|
||||
|
||||
tableData = renderWorkloadChart(
|
||||
svg as SVGElement,
|
||||
bounds,
|
||||
points as WorkloadPoint[],
|
||||
simulateWorkloadSubgraph,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function clearSimulation() {
|
||||
points = points.filter((p) => p.label !== simulationNumber);
|
||||
simulationNumber = Math.max(0, simulationNumber - 1);
|
||||
|
@@ -188,6 +239,25 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
);
|
||||
}
|
||||
|
||||
function saveConfigToPreset() {
|
||||
if (confirm(tr.deckConfigSaveOptionsToPresetConfirm())) {
|
||||
$config.newPerDay = simulateFsrsRequest.newLimit;
|
||||
$config.reviewsPerDay = simulateFsrsRequest.reviewLimit;
|
||||
$config.maximumReviewInterval = simulateFsrsRequest.maxInterval;
|
||||
if (!workload) {
|
||||
$config.desiredRetention = simulateFsrsRequest.desiredRetention;
|
||||
}
|
||||
$newCardsIgnoreReviewLimit = simulateFsrsRequest.newCardsIgnoreReviewLimit;
|
||||
$config.reviewOrder = simulateFsrsRequest.reviewOrder;
|
||||
$config.leechAction = suspendLeeches
|
||||
? DeckConfig_Config_LeechAction.SUSPEND
|
||||
: DeckConfig_Config_LeechAction.TAG_ONLY;
|
||||
$config.leechThreshold = leechThreshold;
|
||||
$config.easyDaysPercentages = [...easyDayPercentages];
|
||||
onPresetChange();
|
||||
}
|
||||
}
|
||||
|
||||
$: if (svg) {
|
||||
let pointsToRender = points;
|
||||
if (smooth) {
|
||||
|
@@ -225,11 +295,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
});
|
||||
}
|
||||
|
||||
tableData = renderSimulationChart(
|
||||
const render_function = workload ? renderWorkloadChart : renderSimulationChart;
|
||||
|
||||
tableData = render_function(
|
||||
svg as SVGElement,
|
||||
bounds,
|
||||
pointsToRender,
|
||||
simulateSubgraph,
|
||||
// This cast shouldn't matter because we aren't switching between modes in the same modal
|
||||
pointsToRender as WorkloadPoint[],
|
||||
(workload ? simulateWorkloadSubgraph : simulateSubgraph) as any as never,
|
||||
);
|
||||
}
|
||||
|
||||
|
@@ -252,7 +325,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
<div class="modal-dialog modal-xl">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h5 class="modal-title">{tr.deckConfigFsrsSimulatorExperimental()}</h5>
|
||||
<h5 class="modal-title">
|
||||
{#if workload}
|
||||
{tr.deckConfigFsrsSimulateDesiredRetentionExperimental()}
|
||||
{:else}
|
||||
{tr.deckConfigFsrsSimulatorExperimental()}
|
||||
{/if}
|
||||
</h5>
|
||||
<button
|
||||
type="button"
|
||||
class="btn-close"
|
||||
|
@ -278,17 +357,21 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
</SettingTitle>
|
||||
</SpinBoxRow>
|
||||
|
||||
<SpinBoxFloatRow
|
||||
bind:value={simulateFsrsRequest.desiredRetention}
|
||||
defaultValue={$config.desiredRetention}
|
||||
min={0.7}
|
||||
max={0.99}
|
||||
percentage={true}
|
||||
>
|
||||
<SettingTitle on:click={() => openHelpModal("desiredRetention")}>
|
||||
{tr.deckConfigDesiredRetention()}
|
||||
</SettingTitle>
|
||||
</SpinBoxFloatRow>
|
||||
{#if !workload}
|
||||
<SpinBoxFloatRow
|
||||
bind:value={simulateFsrsRequest.desiredRetention}
|
||||
defaultValue={$config.desiredRetention}
|
||||
min={0.7}
|
||||
max={0.99}
|
||||
percentage={true}
|
||||
>
|
||||
<SettingTitle
|
||||
on:click={() => openHelpModal("desiredRetention")}
|
||||
>
|
||||
{tr.deckConfigDesiredRetention()}
|
||||
</SettingTitle>
|
||||
</SpinBoxFloatRow>
|
||||
{/if}
|
||||
|
||||
<SpinBoxRow
|
||||
bind:value={simulateFsrsRequest.newLimit}
|
||||
|
@@ -421,79 +504,99 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
{/if}
|
||||
</details>
|
||||
</div>
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={simulateFsrs}
|
||||
>
|
||||
{tr.deckConfigSimulate()}
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={clearSimulation}
|
||||
>
|
||||
{tr.deckConfigClearLastSimulate()}
|
||||
</button>
|
||||
<div>
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={workload ? simulateWorkload : simulateFsrs}
|
||||
>
|
||||
{tr.deckConfigSimulate()}
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={() => {
|
||||
if (confirm(tr.deckConfigSaveOptionsToPresetConfirm())) {
|
||||
$config.newPerDay = simulateFsrsRequest.newLimit;
|
||||
$config.reviewsPerDay = simulateFsrsRequest.reviewLimit;
|
||||
$config.maximumReviewInterval =
|
||||
simulateFsrsRequest.maxInterval;
|
||||
$config.desiredRetention =
|
||||
simulateFsrsRequest.desiredRetention;
|
||||
$newCardsIgnoreReviewLimit =
|
||||
simulateFsrsRequest.newCardsIgnoreReviewLimit;
|
||||
$config.reviewOrder = simulateFsrsRequest.reviewOrder;
|
||||
$config.leechAction = suspendLeeches
|
||||
? DeckConfig_Config_LeechAction.SUSPEND
|
||||
: DeckConfig_Config_LeechAction.TAG_ONLY;
|
||||
$config.leechThreshold = leechThreshold;
|
||||
$config.easyDaysPercentages = [...easyDayPercentages];
|
||||
onPresetChange();
|
||||
}
|
||||
}}
|
||||
>
|
||||
{tr.deckConfigSaveOptionsToPreset()}
|
||||
</button>
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={clearSimulation}
|
||||
>
|
||||
{tr.deckConfigClearLastSimulate()}
|
||||
</button>
|
||||
|
||||
{#if processing}
|
||||
{tr.actionsProcessing()}
|
||||
{/if}
|
||||
<button
|
||||
class="btn {computing ? 'btn-warning' : 'btn-primary'}"
|
||||
disabled={computing}
|
||||
on:click={saveConfigToPreset}
|
||||
>
|
||||
{tr.deckConfigSaveOptionsToPreset()}
|
||||
</button>
|
||||
|
||||
{#if processing}
|
||||
{tr.actionsProcessing()}
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<Graph>
|
||||
<div class="radio-group">
|
||||
<InputBox>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.count}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioCount()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.time}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.statisticsReviewsTimeCheckbox()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.memorized}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioMemorized()}
|
||||
</label>
|
||||
{#if !workload}
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.count}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioCount()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.time}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.statisticsReviewsTimeCheckbox()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateSubgraph.memorized}
|
||||
bind:group={simulateSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioMemorized()}
|
||||
</label>
|
||||
{:else}
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateWorkloadSubgraph.ratio}
|
||||
bind:group={simulateWorkloadSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioRatio()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateWorkloadSubgraph.count}
|
||||
bind:group={simulateWorkloadSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioCount()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateWorkloadSubgraph.time}
|
||||
bind:group={simulateWorkloadSubgraph}
|
||||
/>
|
||||
{tr.statisticsReviewsTimeCheckbox()}
|
||||
</label>
|
||||
<label>
|
||||
<input
|
||||
type="radio"
|
||||
value={SimulateWorkloadSubgraph.memorized}
|
||||
bind:group={simulateWorkloadSubgraph}
|
||||
/>
|
||||
{tr.deckConfigFsrsSimulatorRadioMemorized()}
|
||||
</label>
|
||||
{/if}
|
||||
</InputBox>
|
||||
</div>
|
||||
|
||||
|
@@ -524,7 +627,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
.svg-container {
|
||||
width: 100%;
|
||||
max-height: calc(100vh - 400px); /* Account for modal header, controls, etc */
|
||||
/* Account for modal header, controls, etc */
|
||||
max-height: max(calc(100vh - 400px), 200px);
|
||||
aspect-ratio: 600 / 250;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
|
|
@@ -23,9 +23,12 @@
|
|||
<slot />
|
||||
</Col>
|
||||
<Col --col-size={6} breakpoint="xs">
|
||||
<ConfigInput>
|
||||
<SpinBox bind:value {min} {max} {step} {percentage} bind:focused />
|
||||
<RevertButton slot="revert" bind:value {defaultValue} />
|
||||
</ConfigInput>
|
||||
<Row class="flex-grow-1">
|
||||
<slot name="tabs" />
|
||||
<ConfigInput>
|
||||
<SpinBox bind:value {min} {max} {step} {percentage} bind:focused />
|
||||
<RevertButton slot="revert" bind:value {defaultValue} />
|
||||
</ConfigInput>
|
||||
</Row>
|
||||
</Col>
|
||||
</Row>
|
||||
|
|
|
@@ -55,7 +55,10 @@
|
|||
width: 100%;
|
||||
display: flex;
|
||||
flex-wrap: nowrap;
|
||||
justify-content: space-between;
|
||||
&:has(li:nth-child(3)) {
|
||||
justify-content: space-between;
|
||||
}
|
||||
justify-content: space-around;
|
||||
padding-inline: 0;
|
||||
margin-bottom: 0.5rem;
|
||||
list-style: none;
|
||||
|
|
|
@@ -8,7 +8,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
// When title is null (default), the graph is inlined, not having TitledContainer wrapper.
|
||||
export let title: string | null = null;
|
||||
export let subtitle: string | null = null;
|
||||
export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
|
||||
export let onHelpClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
|
||||
</script>
|
||||
|
||||
{#if title == null}
|
||||
|
@@ -19,7 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
<slot />
|
||||
</div>
|
||||
{:else}
|
||||
<TitledContainer class="d-flex flex-column" {title} {onTitleClick}>
|
||||
<TitledContainer class="d-flex flex-column" {title} {onHelpClick}>
|
||||
<slot slot="tooltip" name="tooltip"></slot>
|
||||
<div class="graph d-flex flex-grow-1 flex-column justify-content-center">
|
||||
{#if subtitle}
|
||||
|
|
|
@@ -57,12 +57,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
const title = tr.statisticsTrueRetentionTitle();
|
||||
const subtitle = tr.statisticsTrueRetentionSubtitle();
|
||||
const onTitleClick = () => {
|
||||
const onHelpClick = () => {
|
||||
openHelpModal(Object.keys(retentionHelp).indexOf("trueRetention"));
|
||||
};
|
||||
</script>
|
||||
|
||||
<Graph {title} {subtitle} {onTitleClick}>
|
||||
<Graph {title} {subtitle} {onHelpClick}>
|
||||
<HelpModal
|
||||
title={tr.statisticsTrueRetentionTitle()}
|
||||
url={HelpPage.DeckOptions.fsrs}
|
||||
|
|
|
@@ -18,8 +18,8 @@ import {
|
|||
bin,
|
||||
cumsum,
|
||||
curveBasis,
|
||||
interpolateBlues,
|
||||
interpolateGreens,
|
||||
interpolateOranges,
|
||||
interpolatePurples,
|
||||
interpolateReds,
|
||||
max,
|
||||
|
@@ -181,7 +181,7 @@ export function renderReviews(
|
|||
const reds = scaleSequential((n) => interpolateReds(cappedRange(n)!)).domain(
|
||||
x.domain() as any,
|
||||
);
|
||||
const blues = scaleSequential((n) => interpolateBlues(cappedRange(n)!)).domain(
|
||||
const oranges = scaleSequential((n) => interpolateOranges(cappedRange(n)!)).domain(
|
||||
x.domain() as any,
|
||||
);
|
||||
const purples = scaleSequential((n) => interpolatePurples(cappedRange(n)!)).domain(
|
||||
|
@@ -195,7 +195,7 @@ export function renderReviews(
|
|||
case BinIndex.Young:
|
||||
return lighterGreens;
|
||||
case BinIndex.Learn:
|
||||
return blues;
|
||||
return oranges;
|
||||
case BinIndex.Relearn:
|
||||
return reds;
|
||||
case BinIndex.Filtered:
|
||||
|
|
|
@@ -31,50 +31,94 @@ export interface Point {
|
|||
label: number;
|
||||
}
|
||||
|
||||
export type WorkloadPoint = Point & {
|
||||
learnSpan: number;
|
||||
};
|
||||
|
||||
export enum SimulateSubgraph {
|
||||
time,
|
||||
count,
|
||||
memorized,
|
||||
}
|
||||
|
||||
export enum SimulateWorkloadSubgraph {
|
||||
ratio,
|
||||
time,
|
||||
count,
|
||||
memorized,
|
||||
}
|
||||
|
||||
export function renderWorkloadChart(
|
||||
svgElem: SVGElement,
|
||||
bounds: GraphBounds,
|
||||
data: WorkloadPoint[],
|
||||
subgraph: SimulateWorkloadSubgraph,
|
||||
) {
|
||||
const xMin = 70;
|
||||
const xMax = 99;
|
||||
|
||||
const x = scaleLinear()
|
||||
.domain([xMin, xMax])
|
||||
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
|
||||
|
||||
const subgraph_data = ({
|
||||
[SimulateWorkloadSubgraph.ratio]: data.map(d => ({ ...d, y: d.timeCost / d.memorized })),
|
||||
[SimulateWorkloadSubgraph.time]: data.map(d => ({ ...d, y: d.timeCost / d.learnSpan })),
|
||||
[SimulateWorkloadSubgraph.count]: data.map(d => ({ ...d, y: d.count / d.learnSpan })),
|
||||
[SimulateWorkloadSubgraph.memorized]: data.map(d => ({ ...d, y: d.memorized })),
|
||||
})[subgraph];
|
||||
|
||||
const yTickFormat = (n: number): string => {
|
||||
return subgraph == SimulateWorkloadSubgraph.time || subgraph == SimulateWorkloadSubgraph.ratio
|
||||
? timeSpan(n, true)
|
||||
: n.toString();
|
||||
};
|
||||
|
||||
const formatter = new Intl.NumberFormat(undefined, {
|
||||
style: "percent",
|
||||
minimumFractionDigits: 0,
|
||||
maximumFractionDigits: 0,
|
||||
});
|
||||
const xTickFormat = (n: number) => formatter.format(n / 100);
|
||||
|
||||
const formatY: (value: number) => string = ({
|
||||
[SimulateWorkloadSubgraph.ratio]: (value: number) =>
|
||||
tr.deckConfigFsrsSimulatorRatioTooltip({ time: timeSpan(value) }),
|
||||
[SimulateWorkloadSubgraph.time]: (value: number) =>
|
||||
tr.statisticsMinutesPerDay({ count: parseFloat((value / 60).toPrecision(2)) }),
|
||||
[SimulateWorkloadSubgraph.count]: (value: number) => tr.statisticsReviewsPerDay({ count: Math.round(value) }),
|
||||
[SimulateWorkloadSubgraph.memorized]: (value: number) =>
|
||||
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
|
||||
})[subgraph];
|
||||
|
||||
function formatX(dr: number) {
|
||||
return `${tr.deckConfigDesiredRetention()}: ${xTickFormat(dr)}<br>`;
|
||||
}
|
||||
|
||||
return _renderSimulationChart(
|
||||
svgElem,
|
||||
bounds,
|
||||
subgraph_data,
|
||||
x,
|
||||
formatY,
|
||||
formatX,
|
||||
(_e: MouseEvent, _d: number) => undefined,
|
||||
yTickFormat,
|
||||
xTickFormat,
|
||||
);
|
||||
}
|
||||
|
||||
export function renderSimulationChart(
|
||||
svgElem: SVGElement,
|
||||
bounds: GraphBounds,
|
||||
data: Point[],
|
||||
subgraph: SimulateSubgraph,
|
||||
): TableDatum[] {
|
||||
const svg = select(svgElem);
|
||||
svg.selectAll(".lines").remove();
|
||||
svg.selectAll(".hover-columns").remove();
|
||||
svg.selectAll(".focus-line").remove();
|
||||
svg.selectAll(".legend").remove();
|
||||
if (data.length == 0) {
|
||||
setDataAvailable(svg, false);
|
||||
return [];
|
||||
}
|
||||
const trans = svg.transition().duration(600) as any;
|
||||
|
||||
// Prepare data
|
||||
const today = new Date();
|
||||
const convertedData = data.map(d => ({
|
||||
...d,
|
||||
date: new Date(today.getTime() + d.x * 24 * 60 * 60 * 1000),
|
||||
x: new Date(today.getTime() + d.x * 24 * 60 * 60 * 1000),
|
||||
}));
|
||||
const xMin = today;
|
||||
const xMax = max(convertedData, d => d.date);
|
||||
|
||||
const x = scaleTime()
|
||||
.domain([xMin, xMax!])
|
||||
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
|
||||
|
||||
svg.select<SVGGElement>(".x-ticks")
|
||||
.call((selection) => selection.transition(trans).call(axisBottom(x).ticks(7).tickSizeOuter(0)))
|
||||
.attr("direction", "ltr");
|
||||
// y scale
|
||||
|
||||
const yTickFormat = (n: number): string => {
|
||||
return subgraph == SimulateSubgraph.time ? timeSpan(n, true) : n.toString();
|
||||
};
|
||||
|
||||
const subgraph_data = ({
|
||||
[SimulateSubgraph.count]: convertedData.map(d => ({ ...d, y: d.count })),
|
||||
|
@@ -82,6 +126,94 @@ export function renderSimulationChart(
|
|||
[SimulateSubgraph.memorized]: convertedData.map(d => ({ ...d, y: d.memorized })),
|
||||
})[subgraph];
|
||||
|
||||
const xMin = today;
|
||||
const xMax = max(subgraph_data, d => d.x);
|
||||
|
||||
const x = scaleTime()
|
||||
.domain([xMin, xMax!])
|
||||
.range([bounds.marginLeft, bounds.width - bounds.marginRight]);
|
||||
|
||||
const yTickFormat = (n: number): string => {
|
||||
return subgraph == SimulateSubgraph.time ? timeSpan(n, true) : n.toString();
|
||||
};
|
||||
|
||||
const formatY: (value: number) => string = ({
|
||||
[SimulateSubgraph.time]: timeSpan,
|
||||
[SimulateSubgraph.count]: (value: number) => tr.statisticsReviews({ reviews: Math.round(value) }),
|
||||
[SimulateSubgraph.memorized]: (value: number) =>
|
||||
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
|
||||
})[subgraph];
|
||||
|
||||
const perDay = ({
|
||||
[SimulateSubgraph.count]: tr.statisticsReviewsPerDay,
|
||||
[SimulateSubgraph.time]: ({ count }: { count: number }) => timeSpan(count),
|
||||
[SimulateSubgraph.memorized]: tr.statisticsCardsPerDay,
|
||||
})[subgraph];
|
||||
|
||||
function legendMouseMove(e: MouseEvent, d: number) {
|
||||
const data = subgraph_data.filter(datum => datum.label == d);
|
||||
|
||||
const total = subgraph == SimulateSubgraph.memorized
|
||||
? data[data.length - 1].memorized - data[0].memorized
|
||||
: sumBy(data, d => d.y);
|
||||
const average = total / (data?.length || 1);
|
||||
|
||||
showTooltip(
|
||||
`#${d}:<br/>
|
||||
${tr.statisticsAverage()}: ${perDay({ count: average })}<br/>
|
||||
${tr.statisticsTotal()}: ${formatY(total)}`,
|
||||
e.pageX,
|
||||
e.pageY,
|
||||
);
|
||||
}
|
||||
|
||||
function formatX(date: Date) {
|
||||
const days = +((date.getTime() - Date.now()) / (60 * 60 * 24 * 1000)).toFixed();
|
||||
return `Date: ${localizedDate(date)}<br>In ${days} Days<br>`;
|
||||
}
|
||||
|
||||
return _renderSimulationChart(
|
||||
svgElem,
|
||||
bounds,
|
||||
subgraph_data,
|
||||
x,
|
||||
formatY,
|
||||
formatX,
|
||||
legendMouseMove,
|
||||
yTickFormat,
|
||||
undefined,
|
||||
);
|
||||
}
|
||||
|
||||
function _renderSimulationChart<T extends { x: any; y: any; label: number }>(
|
||||
svgElem: SVGElement,
|
||||
bounds: GraphBounds,
|
||||
subgraph_data: T[],
|
||||
x: any,
|
||||
formatY: (n: T["y"]) => string,
|
||||
formatX: (n: T["x"]) => string,
|
||||
legendMouseMove: (e: MouseEvent, d: number) => void,
|
||||
yTickFormat?: (n: number) => string,
|
||||
xTickFormat?: (n: number) => string,
|
||||
): TableDatum[] {
|
||||
const svg = select(svgElem);
|
||||
svg.selectAll(".lines").remove();
|
||||
svg.selectAll(".hover-columns").remove();
|
||||
svg.selectAll(".focus-line").remove();
|
||||
svg.selectAll(".legend").remove();
|
||||
if (subgraph_data.length == 0) {
|
||||
setDataAvailable(svg, false);
|
||||
return [];
|
||||
}
|
||||
const trans = svg.transition().duration(600) as any;
|
||||
|
||||
svg.select<SVGGElement>(".x-ticks")
|
||||
.call((selection) =>
|
||||
selection.transition(trans).call(axisBottom(x).ticks(7).tickSizeOuter(0).tickFormat(xTickFormat as any))
|
||||
)
|
||||
.attr("direction", "ltr");
|
||||
// y scale
|
||||
|
||||
const yMax = max(subgraph_data, d => d.y)!;
|
||||
const y = scaleLinear()
|
||||
.range([bounds.height - bounds.marginBottom, bounds.marginTop])
|
||||
|
@@ -110,7 +242,7 @@ export function renderSimulationChart(
|
|||
.attr("fill", "currentColor");
|
||||
|
||||
// x lines
|
||||
const points = subgraph_data.map((d) => [x(d.date), y(d.y), d.label]);
|
||||
const points = subgraph_data.map((d) => [x(d.x), y(d.y), d.label]);
|
||||
const groups = rollup(points, v => Object.assign(v, { z: v[0][2] }), d => d[2]);
|
||||
|
||||
const color = schemeCategory10;
|
||||
|
@@ -157,13 +289,6 @@ export function renderSimulationChart(
|
|||
hideTooltip();
|
||||
});
|
||||
|
||||
const formatY: (value: number) => string = ({
|
||||
[SimulateSubgraph.time]: timeSpan,
|
||||
[SimulateSubgraph.count]: (value: number) => tr.statisticsReviews({ reviews: Math.round(value) }),
|
||||
[SimulateSubgraph.memorized]: (value: number) =>
|
||||
tr.statisticsMemorized({ memorized: Math.round(value).toFixed(0) }),
|
||||
})[subgraph];
|
||||
|
||||
function mousemove(event: MouseEvent, d: any): void {
|
||||
pointer(event, document.body);
|
||||
const date = x.invert(d[0]);
|
||||
|
@@ -182,8 +307,7 @@ export function renderSimulationChart(
|
|||
|
||||
focusLine.attr("x1", d[0]).attr("x2", d[0]).style("opacity", 1);
|
||||
|
||||
const days = +((date.getTime() - Date.now()) / (60 * 60 * 24 * 1000)).toFixed();
|
||||
let tooltipContent = `Date: ${localizedDate(date)}<br>In ${days} Days<br>`;
|
||||
let tooltipContent = formatX(date);
|
||||
for (const [key, value] of Object.entries(groupData)) {
|
||||
const path = svg.select(`path[data-group="${key}"]`);
|
||||
const hidden = path.classed("hidden");
|
||||
|
@@ -212,29 +336,6 @@ export function renderSimulationChart(
|
|||
.on("mousemove", legendMouseMove)
|
||||
.on("mouseout", hideTooltip);
|
||||
|
||||
const perDay = ({
|
||||
[SimulateSubgraph.count]: tr.statisticsReviewsPerDay,
|
||||
[SimulateSubgraph.time]: ({ count }: { count: number }) => timeSpan(count),
|
||||
[SimulateSubgraph.memorized]: tr.statisticsCardsPerDay,
|
||||
})[subgraph];
|
||||
|
||||
function legendMouseMove(e: MouseEvent, d: number) {
|
||||
const data = subgraph_data.filter(datum => datum.label == d);
|
||||
|
||||
const total = subgraph == SimulateSubgraph.memorized
|
||||
? data[data.length - 1].memorized - data[0].memorized
|
||||
: sumBy(data, d => d.y);
|
||||
const average = total / (data?.length || 1);
|
||||
|
||||
showTooltip(
|
||||
`#${d}:<br/>
|
||||
${tr.statisticsAverage()}: ${perDay({ count: average })}<br/>
|
||||
${tr.statisticsTotal()}: ${formatY(total)}`,
|
||||
e.pageX,
|
||||
e.pageY,
|
||||
);
|
||||
}
|
||||
|
||||
legend.append("rect")
|
||||
.attr("x", bounds.width - bounds.marginRight + 36)
|
||||
.attr("width", 12)
|
||||
|
|
|
@ -103,6 +103,8 @@ function initCanvas(): fabric.Canvas {
|
|||
// snap rotation around 0 by +-3deg
|
||||
fabric.Object.prototype.snapAngle = 360;
|
||||
fabric.Object.prototype.snapThreshold = 3;
|
||||
// populate canvas.targets with subtargets during mouse events
|
||||
fabric.Group.prototype.subTargetCheck = true;
|
||||
// disable rotation when selecting
|
||||
canvas.on("selection:created", () => {
|
||||
const g = canvas.getActiveObject();
|
||||
|
|
|
@@ -105,21 +105,6 @@ export const unGroupShapes = (canvas: fabric.Canvas): void => {
|
|||
redraw(canvas);
|
||||
};
|
||||
|
||||
/** Check for the target within a (potentially nested) group
|
||||
* NOTE: assumes that masks do not overlap */
|
||||
export const findTargetInGroup = (group: fabric.Group, p: fabric.Point): fabric.Object | undefined => {
|
||||
if (!group) { return; }
|
||||
const point = fabric.util.transformPoint(p, fabric.util.invertTransform(group.calcOwnMatrix()));
|
||||
for (const shape of group.getObjects()) {
|
||||
if (shape instanceof fabric.Group) {
|
||||
const ret = findTargetInGroup(shape, point);
|
||||
if (ret) { return ret; }
|
||||
} else if (shape.containsPoint(point)) {
|
||||
return shape;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const copyItem = (canvas: fabric.Canvas): void => {
|
||||
const activeObject = canvas.getActiveObject();
|
||||
if (!activeObject) {
|
||||
|
|
|
@@ -4,7 +4,7 @@
|
|||
import { fabric } from "fabric";
|
||||
|
||||
import { get, type Readable } from "svelte/store";
|
||||
import { findTargetInGroup, stopDraw } from "./lib";
|
||||
import { stopDraw } from "./lib";
|
||||
import { undoStack } from "./tool-undo-redo";
|
||||
|
||||
export const fillMask = (canvas: fabric.Canvas, colourStore: Readable<string>): void => {
|
||||
|
@@ -17,9 +17,7 @@ export const fillMask = (canvas: fabric.Canvas, colourStore: Readable<string>):
|
|||
stopDraw(canvas);
|
||||
|
||||
canvas.on("mouse:down", function(o) {
|
||||
const target = o.target instanceof fabric.Group
|
||||
? findTargetInGroup(o.target, canvas.getPointer(o.e) as fabric.Point)
|
||||
: o.target;
|
||||
const target = o.target instanceof fabric.Group ? canvas.targets[0] : o.target;
|
||||
const colour = get(colourStore);
|
||||
if (!target || target.fill === colour) { return; }
|
||||
target.fill = colour;
|