diff --git a/.cargo/config.toml b/.cargo/config.toml
index 3fbb3be1b..49aaa3a6c 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -10,3 +10,6 @@ PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows
 
 [term]
 color = "always"
+
+[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
+rustflags = ["-C", "target-feature=+crt-static"]
diff --git a/.version b/.version
index 45a520da7..ce73bf7c0 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-25.06b7
+25.08b1
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index fc3bc44e6..c22bc764a 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -233,6 +233,8 @@ Spiritual Father
 Emmanuel Ferdman
 Sunong2008
 Marvin Kopf
+Kevin Nakamura
+
 ********************
 
 The text of the 3 clause BSD license follows:
diff --git a/Cargo.lock b/Cargo.lock
index 654437d33..86787124a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -94,6 +94,7 @@ dependencies = [
  "axum",
  "axum-client-ip",
  "axum-extra",
+ "bitflags 2.9.1",
  "blake3",
  "bytes",
  "chrono",
@@ -3548,6 +3549,7 @@ dependencies = [
  "embed-resource",
  "libc",
  "libc-stdhandle",
+ "serde_json",
  "widestring",
  "windows 0.61.3",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index a22badd97..db5753893 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -60,6 +60,7 @@ async-trait = "0.1.88"
 axum = { version = "0.8.4", features = ["multipart", "macros"] }
 axum-client-ip = "1.1.3"
 axum-extra = { version = "0.10.1", features = ["typed-header"] }
+bitflags = "2.9.1"
 blake3 = "1.8.2"
 bytes = "1.10.1"
 camino = "1.1.10"
diff --git a/README.md b/README.md
index 3bdcc2db3..04d5603a7 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Anki
+# Anki®
 
 [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci)
diff --git a/build/configure/src/python.rs b/build/configure/src/python.rs
index 9d5e9057e..e43bceeb3 100644
--- a/build/configure/src/python.rs
+++ b/build/configure/src/python.rs
@@ -1,8 +1,6 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use std::env;
-
 use anyhow::Result;
 use ninja_gen::action::BuildAction;
 use ninja_gen::archives::Platform;
@@ -125,7 +123,14 @@ impl BuildAction for BuildWheel {
     }
 
     fn files(&mut self, build: &mut impl FilesHandle) {
-        build.add_inputs("uv", inputs![":uv_binary"]);
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
+            let uv_path =
+                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
+            build.add_inputs("uv", inputs![uv_path]);
+        } else {
+            build.add_inputs("uv", inputs![":uv_binary"]);
+        }
+
         build.add_inputs("", &self.deps);
 
         // Set the project directory based on which package we're building
@@ -222,15 +227,19 @@ struct Sphinx {
 
 impl BuildAction for Sphinx {
     fn command(&self) -> &str {
-        if env::var("OFFLINE_BUILD").is_err() {
-            "$uv sync --extra sphinx && $python python/sphinx/build.py"
-        } else {
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
             "$python python/sphinx/build.py"
+        } else {
+            "$uv sync --extra sphinx && $python python/sphinx/build.py"
         }
     }
 
     fn files(&mut self, build: &mut impl FilesHandle) {
-        if env::var("OFFLINE_BUILD").is_err() {
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
+            let uv_path =
+                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
+            build.add_inputs("uv", inputs![uv_path]);
+        } else {
             build.add_inputs("uv", inputs![":uv_binary"]);
             // Set environment variable to use the existing pyenv
             build.add_variable("pyenv_path", "$builddir/pyenv");
diff --git a/build/ninja_gen/Cargo.toml b/build/ninja_gen/Cargo.toml
index cacab6a7b..5e5a4f736 100644
--- a/build/ninja_gen/Cargo.toml
+++ b/build/ninja_gen/Cargo.toml
@@ -35,3 +35,7 @@ path = "src/bin/update_uv.rs"
 [[bin]]
 name = "update_protoc"
 path = "src/bin/update_protoc.rs"
+
+[[bin]]
+name = "update_node"
+path = "src/bin/update_node.rs"
diff --git a/build/ninja_gen/src/bin/update_node.rs b/build/ninja_gen/src/bin/update_node.rs
new file mode 100644
index 000000000..32dbf6d4a
--- /dev/null
+++ b/build/ninja_gen/src/bin/update_node.rs
@@ -0,0 +1,268 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+
+#[derive(Debug)]
+struct NodeRelease {
+    version: String,
+    files: Vec<NodeFile>,
+}
+
+#[derive(Debug)]
+struct NodeFile {
+    filename: String,
+    url: String,
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let release_info = fetch_node_release_info()?;
+    let new_text = generate_node_archive_function(&release_info)?;
+    update_node_text(&new_text)?;
+    println!("Node.js archive function updated successfully!");
+    Ok(())
+}
+
+fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
+    let client = Client::new();
+
+    // Get the Node.js release info
+    let response = client
+        .get("https://nodejs.org/dist/index.json")
+        .header("User-Agent", "anki-build-updater")
+        .send()?;
+
+    let releases: Vec<Value> = response.json()?;
+
+    // Find the latest LTS release
+    let latest = releases
+        .iter()
+        .find(|release| {
+            // LTS releases have a non-false "lts" field
+            release["lts"].as_str().is_some() && release["lts"] != false
+        })
+        .ok_or("No LTS releases found")?;
+
+    let version = latest["version"]
+        .as_str()
+        .ok_or("Version not found")?
+        .to_string();
+
+    let files = latest["files"]
+        .as_array()
+        .ok_or("Files array not found")?
+        .iter()
+        .map(|f| f.as_str().unwrap_or(""))
+        .collect::<Vec<&str>>();
+
+    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
+    println!("Found Node.js LTS version: {version} ({lts_name})");
+
+    // Map platforms to their expected file keys and full filenames
+    let platform_mapping = vec![
+        (
+            "linux-x64",
+            "linux-x64",
+            format!("node-{version}-linux-x64.tar.xz"),
+        ),
+        (
+            "linux-arm64",
+            "linux-arm64",
+            format!("node-{version}-linux-arm64.tar.xz"),
+        ),
+        (
+            "darwin-x64",
+            "osx-x64-tar",
+            format!("node-{version}-darwin-x64.tar.xz"),
+        ),
+        (
+            "darwin-arm64",
+            "osx-arm64-tar",
+            format!("node-{version}-darwin-arm64.tar.xz"),
+        ),
+        (
+            "win-x64",
+            "win-x64-zip",
+            format!("node-{version}-win-x64.zip"),
+        ),
+        (
+            "win-arm64",
+            "win-arm64-zip",
+            format!("node-{version}-win-arm64.zip"),
+        ),
+    ];
+
+    let mut node_files = Vec::new();
+
+    for (platform, file_key, filename) in platform_mapping {
+        // Check if this file exists in the release
+        if files.contains(&file_key) {
+            let url = format!("https://nodejs.org/dist/{version}/{filename}");
+            node_files.push(NodeFile {
+                filename: filename.clone(),
+                url,
+            });
+            println!("Found file for {platform}: {filename} (key: {file_key})");
+        } else {
+            return Err(
+                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
+            );
+        }
+    }
+
+    Ok(NodeRelease {
+        version,
+        files: node_files,
+    })
+}
+
+fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    // Fetch the SHASUMS256.txt file once
+    println!("Fetching SHA256 checksums...");
+    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
+    let shasums_response = client
+        .get(&shasums_url)
+        .header("User-Agent", "anki-build-updater")
+        .send()?;
+    let shasums_text = shasums_response.text()?;
+
+    // Create a mapping from filename patterns to platform names - using the exact
+    // patterns we stored in files
+    let platform_mapping = vec![
+        ("linux-x64.tar.xz", "LinuxX64"),
+        ("linux-arm64.tar.xz", "LinuxArm"),
+        ("darwin-x64.tar.xz", "MacX64"),
+        ("darwin-arm64.tar.xz", "MacArm"),
+        ("win-x64.zip", "WindowsX64"),
+        ("win-arm64.zip", "WindowsArm"),
+    ];
+
+    let mut platform_blocks = Vec::new();
+
+    for (file_pattern, platform_name) in platform_mapping {
+        // Find the file that ends with this pattern
+        if let Some(file) = release
+            .files
+            .iter()
+            .find(|f| f.filename.ends_with(file_pattern))
+        {
+            // Find the SHA256 for this file
+            let sha256 = shasums_text
+                .lines()
+                .find(|line| line.contains(&file.filename))
+                .and_then(|line| line.split_whitespace().next())
+                .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;
+
+            println!(
+                "Found SHA256 for {}: {} => {}",
+                platform_name, file.filename, sha256
+            );
+
+            let block = format!(
+                "        Platform::{} => OnlineArchive {{\n            url: \"{}\",\n            sha256: \"{}\",\n        }},",
+                platform_name, file.url, sha256
+            );
+            platform_blocks.push(block);
+        } else {
+            return Err(format!(
+                "File not found for platform {platform_name}: no file ending with {file_pattern}"
+            )
+            .into());
+        }
+    }
+
+    let function = format!(
+        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
+        platform_blocks.join("\n")
+    );
+
+    Ok(function)
+}
+
+fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
+    let node_rs_content = read_node_rs()?;
+
+    // Regex to match the entire node_archive function with proper multiline
+    // matching
+    let re = Regex::new(
+        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
+    )?;
+
+    let updated_content = re.replace(&node_rs_content, new_function);
+
+    write_node_rs(&updated_content)?;
+    Ok(())
+}
+
+fn read_node_rs() -> Result<String, Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    Ok(fs::read_to_string(path)?)
+}
+
+fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    fs::write(path, content)?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_regex_replacement() {
+        let sample_content = r#"Some other code
+pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
+            sha256: "old_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
+            sha256: "old_hash",
+        },
+    }
+}
+
+More code here"#;
+
+        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
+            sha256: "new_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
+            sha256: "new_hash",
+        },
+    }
+}"#;
+
+        let re = Regex::new(
+            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
+        ).unwrap();
+
+        let result = re.replace(sample_content, new_function);
+        assert!(result.contains("v21.0.0"));
+        assert!(result.contains("new_hash"));
+        assert!(!result.contains("old_hash"));
+        assert!(result.contains("Some other code"));
+        assert!(result.contains("More code here"));
+    }
+}
diff --git a/build/ninja_gen/src/node.rs b/build/ninja_gen/src/node.rs
index 10b3e6184..b7b66225b 100644
--- a/build/ninja_gen/src/node.rs
+++ b/build/ninja_gen/src/node.rs
@@ -19,28 +19,28 @@ use crate::input::BuildInput;
 pub fn node_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
-            sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
+            sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
         },
         Platform::LinuxArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
-            sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
+            sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
         },
         Platform::MacX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
-            sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
+            sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
         },
         Platform::MacArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
-            sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
+            sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
         },
         Platform::WindowsX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
-            sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
+            sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
         },
         Platform::WindowsArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip",
-            sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
+            sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
         },
     }
 }
diff --git a/ftl/core-repo b/ftl/core-repo
index 4a65d6012..a9216499b 160000
--- a/ftl/core-repo
+++ b/ftl/core-repo
@@ -1 +1 @@
-Subproject commit 4a65d6012ac022a35f5c80c80b2b665447b6a525
+Subproject commit a9216499ba1fb1538cfd740c698adaaa3410fd4b
diff --git a/ftl/core/deck-config.ftl b/ftl/core/deck-config.ftl
index de2dbac95..01eac3369 100644
--- a/ftl/core/deck-config.ftl
+++ b/ftl/core/deck-config.ftl
@@ -426,7 +426,7 @@ deck-config-desired-retention-tooltip =
     values will greatly increase your workload, and lower values can be demoralizing when you forget
    a lot of material.
 deck-config-desired-retention-tooltip2 =
-    The workload values provided by the tooltip are a rough approximation. For a greater level of accuracy, use the simulator.
+    The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
 deck-config-historical-retention-tooltip =
     When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
     assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@@ -514,6 +514,7 @@ deck-config-advanced-settings = Advanced Settings
 deck-config-smooth-graph = Smooth graph
 deck-config-suspend-leeches = Suspend leeches
 deck-config-save-options-to-preset = Save Changes to Preset
+deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
 # Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
 # to show the total number of cards that can be recalled or retrieved on a
 # specific date.
diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl
index 3b9f7c401..b23d12654 100644
--- a/ftl/core/importing.ftl
+++ b/ftl/core/importing.ftl
@@ -48,6 +48,7 @@ importing-merge-notetypes-help =
     Warning: This will require a one-way sync, and may mark existing notes as modified.
 importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
 importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
+importing-new-deck-will-be-created = A new deck will be created: { $name }
 importing-notes-added-from-file = Notes added from file: { $val }
 importing-notes-found-in-file = Notes found in file: { $val }
 importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }
diff --git a/ftl/core/statistics.ftl b/ftl/core/statistics.ftl
index c3a2bb613..8da1aace8 100644
--- a/ftl/core/statistics.ftl
+++ b/ftl/core/statistics.ftl
@@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
 statistics-counts-title = Card Counts
 statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards
 
-## True Retention represents your actual retention rate from past reviews, in
-## comparison to the "desired retention" parameter of FSRS, which forecasts
-## future retention. True Retention is the percentage of all reviewed cards
+## Retention rate represents your actual retention rate from past reviews, in
+## comparison to the "desired retention" setting of FSRS, which forecasts
+## future retention. Retention rate is the percentage of all reviewed cards
 ## that were marked as "Hard," "Good," or "Easy" within a specific time period.
 ##
 ## Most of these strings are used as column / row headings in a table.
@@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
 ## N.B. Stats cards may be very small on mobile devices and when the Stats
 ## window is certain sizes.
 
-statistics-true-retention-title = True Retention
+statistics-true-retention-title = Retention rate
 statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
-statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
+statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
 statistics-true-retention-range = Range
 statistics-true-retention-pass = Pass
 statistics-true-retention-fail = Fail
diff --git a/ftl/qt-repo b/ftl/qt-repo
index f42461a64..a1134ab59 160000
--- a/ftl/qt-repo
+++ b/ftl/qt-repo
@@ -1 +1 @@
-Subproject commit f42461a6438cbe844150f543128d79a669bc4ef2
+Subproject commit a1134ab59d3d23468af2968741aa1f21d16ff308
diff --git a/package.json b/package.json
index dc6f6bd2c..fc0a6e1b3 100644
--- a/package.json
+++ b/package.json
@@ -19,8 +19,8 @@
         "@poppanator/sveltekit-svg": "^5.0.0",
         "@sqltools/formatter": "^1.2.2",
         "@sveltejs/adapter-static": "^3.0.0",
-        "@sveltejs/kit": "^2.20.7",
-        "@sveltejs/vite-plugin-svelte": "4.0.0",
+        "@sveltejs/kit": "^2.22.2",
+        "@sveltejs/vite-plugin-svelte": "5.1",
         "@types/bootstrap": "^5.0.12",
         "@types/codemirror": "^5.60.0",
         "@types/d3": "^7.0.0",
@@ -30,7 +30,7 @@
         "@types/jqueryui": "^1.12.13",
         "@types/lodash-es": "^4.17.4",
         "@types/marked": "^5.0.0",
-        "@types/node": "^20",
+        "@types/node": "^22",
         "@typescript-eslint/eslint-plugin": "^5.60.1",
         "@typescript-eslint/parser": "^5.60.1",
         "caniuse-lite": "^1.0.30001431",
@@ -48,16 +48,16 @@
         "prettier": "^3.4.2",
         "prettier-plugin-svelte": "^3.3.2",
         "sass": "<1.77",
-        "svelte": "^5.17.3",
-        "svelte-check": "^3.4.4",
-        "svelte-preprocess": "^5.0.4",
+        "svelte": "^5.34.9",
+        "svelte-check": "^4.2.2",
+        "svelte-preprocess": "^6.0.3",
         "svelte-preprocess-esbuild": "^3.0.1",
         "svgo": "^3.2.0",
         "tslib": "^2.0.3",
-        "tsx": "^3.12.0",
+        "tsx": "^4.8.1",
         "typescript": "^5.0.4",
-        "vite": "5.4.19",
-        "vitest": "^2"
+        "vite": "6",
+        "vitest": "^3"
     },
     "dependencies": {
         "@bufbuild/protobuf": "^1.2.1",
@@ -82,7 +82,8 @@
     },
     "resolutions": {
         "canvas": "npm:empty-npm-package@1.0.0",
-        "cookie": "0.7.0"
+        "cookie": "0.7.0",
+        "vite": "6"
     },
     "browserslist": [
         "defaults",
diff --git a/proto/anki/cards.proto b/proto/anki/cards.proto
index c120440e8..5c9838571 100644
--- a/proto/anki/cards.proto
+++ b/proto/anki/cards.proto
@@ -51,6 +51,7 @@ message Card {
   optional FsrsMemoryState memory_state = 20;
   optional float desired_retention = 21;
   optional float decay = 22;
+  optional int64 last_review_time_secs = 23;
   string custom_data = 19;
 }
diff --git a/proto/anki/config.proto b/proto/anki/config.proto
index d61f139d6..ea115f0fc 100644
--- a/proto/anki/config.proto
+++ b/proto/anki/config.proto
@@ -56,6 +56,7 @@ message ConfigKey {
     RENDER_LATEX = 25;
     LOAD_BALANCER_ENABLED = 26;
    FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
+    FSRS_LEGACY_EVALUATE = 28;
   }
   enum String {
     SET_DUE_BROWSER = 0;
diff --git a/proto/anki/deck_config.proto b/proto/anki/deck_config.proto
index 831283931..9dae49c6a 100644
--- a/proto/anki/deck_config.proto
+++ b/proto/anki/deck_config.proto
@@ -236,6 +236,7 @@ message DeckConfigsForUpdate {
     bool new_cards_ignore_review_limit = 7;
     bool fsrs = 8;
     bool fsrs_health_check = 11;
+    bool fsrs_legacy_evaluate = 12;
     bool apply_all_parent_limits = 9;
     uint32 days_since_last_fsrs_optimize = 10;
   }
diff --git a/proto/anki/import_export.proto b/proto/anki/import_export.proto
index 88a7ad163..3273a57bb 100644
--- a/proto/anki/import_export.proto
+++ b/proto/anki/import_export.proto
@@ -176,9 +176,12 @@ message CsvMetadata {
   // to determine the number of columns.
   repeated string column_labels = 5;
   oneof deck {
+    // id of an existing deck
     int64 deck_id = 6;
     // One-based. 0 means n/a.
     uint32 deck_column = 7;
+    // name of new deck to be created
+    string deck_name = 17;
   }
   oneof notetype {
     // One notetype for all rows with given column mapping.
diff --git a/proto/anki/scheduler.proto b/proto/anki/scheduler.proto
index 5e568aa92..01f092a39 100644
--- a/proto/anki/scheduler.proto
+++ b/proto/anki/scheduler.proto
@@ -56,6 +56,8 @@ service SchedulerService {
   rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
       returns (SimulateFsrsReviewResponse);
   rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
+  rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
+      returns (EvaluateParamsResponse);
   rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
   // The number of days the calculated interval was fuzzed by on the previous
   // review (if any). Utilized by the FSRS add-on.
@@ -402,31 +404,6 @@ message SimulateFsrsReviewRequest {
   repeated float easy_days_percentages = 10;
   deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
   optional uint32 suspend_after_lapse_count = 12;
-  // For CMRR
-  message CMRRTarget {
-    message Memorized {
-      float loss_aversion = 1;
-    };
-
-    message Stability {};
-
-    message FutureMemorized {
-      int32 days = 1;
-    };
-
-    message AverageFutureMemorized {
-      int32 days = 1;
-    };
-
-    oneof kind {
-      Memorized memorized = 1;
-      Stability stability = 2;
-      FutureMemorized future_memorized = 3;
-      AverageFutureMemorized average_future_memorized = 4;
-    };
-  };
-
-  optional CMRRTarget target = 13;
 }
 
 message SimulateFsrsReviewResponse {
@@ -467,6 +444,12 @@ message EvaluateParamsRequest {
   uint32 num_of_relearning_steps = 3;
 }
 
+message EvaluateParamsLegacyRequest {
+  repeated float params = 1;
+  string search = 2;
+  int64 ignore_revlogs_before_ms = 3;
+}
+
 message EvaluateParamsResponse {
   float log_loss = 1;
   float rmse_bins = 2;
diff --git a/pylib/anki/cards.py b/pylib/anki/cards.py
index 02807ae73..854d4ed18 100644
--- a/pylib/anki/cards.py
+++ b/pylib/anki/cards.py
@@ -49,6 +49,7 @@ class Card(DeprecatedNamesMixin):
     memory_state: FSRSMemoryState | None
     desired_retention: float | None
     decay: float | None
+    last_review_time: int | None
 
     def __init__(
         self,
@@ -103,6 +104,11 @@ class Card(DeprecatedNamesMixin):
             card.desired_retention if card.HasField("desired_retention") else None
         )
         self.decay = card.decay if card.HasField("decay") else None
+        self.last_review_time = (
+            card.last_review_time_secs
+            if card.HasField("last_review_time_secs")
+            else None
+        )
 
     def _to_backend_card(self) -> cards_pb2.Card:
         # mtime & usn are set by backend
diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py
index 3cbb60319..9ff8dcd9e 100644
--- a/pylib/anki/lang.py
+++ b/pylib/anki/lang.py
@@ -73,6 +73,7 @@ langs = sorted(
         ("ଓଡ଼ିଆ", "or_OR"),
         ("Filipino", "tl"),
         ("ئۇيغۇر", "ug"),
+        ("Oʻzbek", "uz_UZ"),
     ]
 )
 
@@ -123,6 +124,7 @@ compatMap = {
     "th": "th_TH",
     "tr": "tr_TR",
     "uk": "uk_UA",
+    "uz": "uz_UZ",
     "vi": "vi_VN",
 }
diff --git a/qt/aqt/about.py b/qt/aqt/about.py
index fb90a9355..228d3cfeb 100644
--- a/qt/aqt/about.py
+++ b/qt/aqt/about.py
@@ -66,7 +66,8 @@ def show(mw: aqt.AnkiQt) -> QDialog:
     # WebView contents
     ######################################################################
     abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>"
-    abouttext += f"<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}"
+    lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®")
+    abouttext += f"<p>{lede}"
     abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
     abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
     abouttext += ("Python %s Qt %s PyQt %s<br>") % (
@@ -223,6 +224,7 @@
         "Mukunda Madhav Dey",
         "Adnane Taghi",
         "Anon_0000",
+        "Bilolbek Normuminov",
     )
 )
diff --git a/qt/aqt/errors.py b/qt/aqt/errors.py
index af1036acd..a6d9251e2 100644
--- a/qt/aqt/errors.py
+++ b/qt/aqt/errors.py
@@ -23,25 +23,36 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr
 if TYPE_CHECKING:
     from aqt.main import AnkiQt
 
+# so we can be non-modal/non-blocking, without Python deallocating the message
+# box ahead of time
+_mbox: QMessageBox | None = None
+
 
 def show_exception(*, parent: QWidget, exception: Exception) -> None:
     "Present a caught exception to the user using a pop-up."
     if isinstance(exception, Interrupted):
         # nothing to do
         return
+    global _mbox
+    error_lines = []
+    help_page = HelpPage.TROUBLESHOOTING
     if isinstance(exception, BackendError):
         if exception.context:
-            print(exception.context)
+            error_lines.append(exception.context)
         if exception.backtrace:
-            print(exception.backtrace)
-        showWarning(str(exception), parent=parent, help=exception.help_page)
+            error_lines.append(exception.backtrace)
+        if exception.help_page is not None:
+            help_page = exception.help_page
     else:
         # if the error is not originating from the backend, dump
         # a traceback to the console to aid in debugging
-        traceback.print_exception(
-            None, exception, exception.__traceback__, file=sys.stdout
+        error_lines = traceback.format_exception(
+            None, exception, exception.__traceback__
         )
-        showWarning(str(exception), parent=parent)
+    error_text = "\n".join(error_lines)
+    print(error_text)
+    _mbox = _init_message_box(str(exception), error_text, help_page)
+    _mbox.show()
 
 
 def is_chromium_cert_error(error: str) -> bool:
@@ -158,9 +169,39 @@ if not os.environ.get("DEBUG"):
     sys.excepthook = excepthook
 
 
-# so we can be non-modal/non-blocking, without Python deallocating the message
-# box ahead of time
-_mbox: QMessageBox | None = None
+
+def _init_message_box(
+    user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING
+):
+    global _mbox
+
+    _mbox = QMessageBox()
+    _mbox.setWindowTitle("Anki")
+    _mbox.setText(user_text)
+    _mbox.setIcon(QMessageBox.Icon.Warning)
+    _mbox.setTextFormat(Qt.TextFormat.PlainText)
+
+    def show_help():
+        openHelp(help_page)
+
+    def copy_debug_info():
+        QApplication.clipboard().setText(debug_text)
+        tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
+
+    help = _mbox.addButton(QMessageBox.StandardButton.Help)
+    if debug_text:
+        debug_info = _mbox.addButton(
+            tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
+        )
+        debug_info.disconnect()
+        debug_info.clicked.connect(copy_debug_info)
+    cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
+    cancel.setText(tr.actions_close())
+
+    help.disconnect()
+    help.clicked.connect(show_help)
+
+    return _mbox
 
 
 class ErrorHandler(QObject):
@@ -252,33 +293,7 @@ class ErrorHandler(QObject):
             user_text += "\n\n" + self._addonText(error)
             debug_text += addon_debug_info()
 
-        def show_troubleshooting():
-            openHelp(HelpPage.TROUBLESHOOTING)
-
-        def copy_debug_info():
-            QApplication.clipboard().setText(debug_text)
-            tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
-
-        global _mbox
-        _mbox = QMessageBox()
-        _mbox.setWindowTitle("Anki")
-        _mbox.setText(user_text)
-        _mbox.setIcon(QMessageBox.Icon.Warning)
-        _mbox.setTextFormat(Qt.TextFormat.PlainText)
-
-        troubleshooting = _mbox.addButton(
-            tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole
-        )
-        debug_info = _mbox.addButton(
-            tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
-        )
-        cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
-        cancel.setText(tr.actions_close())
-
-        troubleshooting.disconnect()
-        troubleshooting.clicked.connect(show_troubleshooting)
-        debug_info.disconnect()
-        debug_info.clicked.connect(copy_debug_info)
+        _mbox = _init_message_box(user_text, debug_text)
 
         if self.fatal_error_encountered:
             _mbox.exec()
diff --git a/qt/aqt/main.py b/qt/aqt/main.py
index 4e0b82dd5..c707d1b2a 100644
--- a/qt/aqt/main.py
+++ b/qt/aqt/main.py
@@ -1309,7 +1309,7 @@ title="{}" {}>{}""".format(
         if not askUser(tr.qt_misc_open_anki_launcher()):
             return
 
-        from aqt.update import update_and_restart
+        from aqt.package import update_and_restart
 
         update_and_restart()
 
@@ -1394,7 +1394,7 @@ title="{}" {}>{}""".format(
     ##########################################################################
 
     def setupMenus(self) -> None:
-        from aqt.update import have_launcher
+        from aqt.package import launcher_executable
 
         m = self.form
 
@@ -1426,7 +1426,7 @@ title="{}" {}>{}""".format(
         qconnect(m.actionEmptyCards.triggered, self.onEmptyCards)
         qconnect(m.actionNoteTypes.triggered, self.onNoteTypes)
         qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade)
-        if not have_launcher():
+        if not launcher_executable():
             m.action_upgrade_downgrade.setVisible(False)
 
         qconnect(m.actionPreferences.triggered, self.onPrefs)
diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py
index 1f9f1ad29..35ae517e0 100644
--- a/qt/aqt/mediasrv.py
+++ b/qt/aqt/mediasrv.py
@@ -891,7 +891,7 @@ exposed_backend_list = [
     "compute_fsrs_params",
     "compute_optimal_retention",
     "set_wants_abort",
-    "evaluate_params",
+    "evaluate_params_legacy",
     "get_optimal_retention_parameters",
     "simulate_fsrs_review",
     # DeckConfigService
diff --git a/qt/aqt/package.py b/qt/aqt/package.py
index d6236c4cd..968218741 100644
--- a/qt/aqt/package.py
+++ b/qt/aqt/package.py
@@ -5,10 +5,13 @@
 
 from __future__ import annotations
 
+import contextlib
+import os
 import subprocess
+import sys
 from pathlib import Path
 
-from anki.utils import is_mac
+from anki.utils import is_mac, is_win
 
 # ruff: noqa: F401
 
@@ -65,3 +68,105 @@ def first_run_setup() -> None:
     # Wait for both commands to complete
     for proc in processes:
         proc.wait()
+
+
+def uv_binary() -> str | None:
+    """Return the path to the uv binary."""
+    return os.environ.get("ANKI_LAUNCHER_UV")
+
+
+def launcher_root() -> str | None:
+    """Return the path to the launcher root directory (AnkiProgramFiles)."""
+    return os.environ.get("UV_PROJECT")
+
+
+def venv_binary(cmd: str) -> str | None:
+    """Return the path to a binary in the launcher's venv."""
+    root = launcher_root()
+    if not root:
+        return None
+
+    root_path = Path(root)
+    if is_win:
+        binary_path = root_path / ".venv" / "Scripts" / cmd
+    else:
+        binary_path = root_path / ".venv" / "bin" / cmd
+
+    return str(binary_path)
+
+
+def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
+    """Add Python requirements to the launcher venv using uv add.
+
+    Returns (success, output)"""
+
+    binary = uv_binary()
+    if not binary:
+        return (False, "Not in packaged build.")
+
+    uv_cmd = [binary, "add"] + reqs
+    result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)
+
+    if result.returncode == 0:
+        root = launcher_root()
+        if root:
+            sync_marker = Path(root) / ".sync_complete"
+            sync_marker.touch()
+
+        return (True, result.stdout)
+    else:
+        return (False, result.stderr)
+
+
+def launcher_executable() -> str | None:
+    """Return the path to the Anki launcher executable."""
+    return os.getenv("ANKI_LAUNCHER")
+
+
+def trigger_launcher_run() -> None:
+    """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
+    try:
+        root = launcher_root()
+        if not root:
+            return
+
+        pyproject_path = Path(root) / "pyproject.toml"
+
+        if pyproject_path.exists():
+            # Touch the file to update its mtime
+            pyproject_path.touch()
+    except Exception as e:
+        print(e)
+
+
+def update_and_restart() -> None:
+    """Update and restart Anki using the launcher."""
+    from aqt import mw
+
+    launcher = launcher_executable()
+    assert launcher
+
+    trigger_launcher_run()
+
+    with contextlib.suppress(ResourceWarning):
+        env = os.environ.copy()
+        # fixes a bug where launcher fails to appear if opening it
+        # straight after updating
+        if "GNOME_TERMINAL_SCREEN" in env:
+            del env["GNOME_TERMINAL_SCREEN"]
+        creationflags = 0
+        if sys.platform == "win32":
+            creationflags = (
+                subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
+            )
+        subprocess.Popen(
+            [launcher],
+            start_new_session=True,
+            stdin=subprocess.DEVNULL,
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            env=env,
+            creationflags=creationflags,
+        )
+
+    mw.app.quit()
diff --git a/qt/aqt/update.py b/qt/aqt/update.py
index 61fec8e6b..e5794eead 100644
--- a/qt/aqt/update.py
+++ b/qt/aqt/update.py
@@ -3,16 +3,17 @@
 
 from __future__ import annotations
 
-import contextlib
-import os
-import subprocess
-from pathlib import Path
-
 import aqt
 from anki.buildinfo import buildhash
 from anki.collection import CheckForUpdateResponse, Collection
-from anki.utils import dev_mode, int_time, int_version, is_mac, is_win, plat_desc
+from anki.utils import dev_mode, int_time, int_version, plat_desc
 from aqt.operations import QueryOp
+from aqt.package import (
+    launcher_executable as _launcher_executable,
+)
+from aqt.package import (
+    update_and_restart as _update_and_restart,
+)
 from aqt.qt import *
 from aqt.utils import openLink, show_warning, showText, tr
 
@@ -84,67 +85,7 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None:
         # ignore this update
         mw.pm.meta["suppressUpdate"] = ver
     elif ret == QMessageBox.StandardButton.Yes:
-        if have_launcher():
-            update_and_restart()
+        if _launcher_executable():
+            _update_and_restart()
         else:
             openLink(aqt.appWebsiteDownloadSection)
-
-
-def _anki_launcher_path() -> str | None:
-    return os.getenv("ANKI_LAUNCHER")
-
-
-def have_launcher() -> bool:
-    return _anki_launcher_path() is not None
-
-
-def update_and_restart() -> None:
-    from aqt import mw
-
-    launcher = _anki_launcher_path()
-    assert launcher
-
-    _trigger_launcher_run()
-
-    with contextlib.suppress(ResourceWarning):
-        env = os.environ.copy()
-        creationflags = 0
-        if sys.platform == "win32":
-            creationflags = (
-                subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
-            )
-        subprocess.Popen(
-            [launcher],
-            start_new_session=True,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
-            env=env,
-            creationflags=creationflags,
-        )
-
-    mw.app.quit()
-
-
-def _trigger_launcher_run() -> None:
-    """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
-    try:
-        # Get the local data directory equivalent to Rust's dirs::data_local_dir()
-        if is_win:
-            from .winpaths import get_local_appdata
-
-            data_dir = Path(get_local_appdata())
-        elif is_mac:
-            data_dir = Path.home() / "Library" / "Application Support"
-        else:  # Linux
-            data_dir = Path(
-                os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
-            )
-
-        pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
-
-        if pyproject_path.exists():
-            # Touch the file to update its mtime
-            pyproject_path.touch()
-    except Exception as e:
-        print(e)
diff --git a/qt/hatch_build.py b/qt/hatch_build.py
index aaf345842..1bc9eccd7 100644
--- a/qt/hatch_build.py
+++ b/qt/hatch_build.py
@@ -67,16 +67,12 @@ class CustomBuildHook(BuildHookInterface):
 
     def _should_exclude(self, path: Path) -> bool:
         """Check if a file should be excluded from the wheel."""
-        path_str = str(path)
-        # Exclude __pycache__
-        if "/__pycache__/" in path_str:
+        if "/__pycache__/" in str(path):
            return True
         if path.suffix in [".ui", ".scss", ".map", ".ts"]:
             return True
         if path.name.startswith("tsconfig"):
             return True
-        if "/aqt/data" in path_str:
-            return True
 
         return False
diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml
index fd6f2230c..7de321a29 100644
--- a/qt/launcher/Cargo.toml
+++ b/qt/launcher/Cargo.toml
@@ -13,6 +13,10 @@ anki_process.workspace = true
 anyhow.workspace = true
 camino.workspace = true
 dirs.workspace = true
+serde_json.workspace = true
+
+[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]
+libc.workspace = true
 
 [target.'cfg(windows)'.dependencies]
 windows.workspace = true
diff --git a/qt/launcher/addon/__init__.py b/qt/launcher/addon/__init__.py
index fb0168d14..63a2cc5a9 100644
--- a/qt/launcher/addon/__init__.py
+++ b/qt/launcher/addon/__init__.py
@@ -8,29 +8,88 @@ import os
 import subprocess
 import sys
 from pathlib import Path
+from typing import Any
 
-import aqt.sound
 from anki.utils import pointVersion
 from aqt import mw
 from aqt.qt import QAction
 from aqt.utils import askUser, is_mac, is_win, showInfo
 
 
-def _anki_launcher_path() -> str | None:
+def launcher_executable() -> str | None:
+    """Return the path to the Anki launcher executable."""
     return os.getenv("ANKI_LAUNCHER")
 
 
-def have_launcher() -> bool:
-    return _anki_launcher_path() is not None
+def uv_binary() -> str | None:
+    """Return the path to the uv binary."""
+    return os.environ.get("ANKI_LAUNCHER_UV")
+
+
+def launcher_root() -> str | None:
+    """Return the path to the launcher root directory (AnkiProgramFiles)."""
+    return os.environ.get("UV_PROJECT")
+
+
+def venv_binary(cmd: str) -> str | None:
+    """Return the path to a binary in the launcher's venv."""
+    root = launcher_root()
+    if not root:
+        return None
+
+    root_path = Path(root)
+    if is_win:
+        binary_path = root_path / ".venv" / "Scripts" / cmd
+    else:
+        binary_path = root_path / ".venv" / "bin" / cmd
+
+    return str(binary_path)
+
+
+def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
+    """Add Python requirements to the launcher venv using uv add.
+
+    Returns (success, output)"""
+
+    binary = uv_binary()
+    if not binary:
+        return (False, "Not in packaged build.")
+
+    uv_cmd = [binary, "add"] + reqs
+    result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)
+
+    if result.returncode == 0:
+        root = launcher_root()
+        if root:
+            sync_marker = Path(root) / ".sync_complete"
+            sync_marker.touch()
+        return (True, result.stdout)
+    else:
+        return (False, result.stderr)
+
+
+def trigger_launcher_run() -> None:
+    """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
+    try:
+        root = launcher_root()
+        if not root:
+            return
+
+        pyproject_path = Path(root) / "pyproject.toml"
+
+        if pyproject_path.exists():
+            # Touch the file to update its mtime
+            pyproject_path.touch()
+    except Exception as e:
+        print(e)
 
 
 def update_and_restart() -> None:
-    from aqt import mw
-
-    launcher = _anki_launcher_path()
+    """Update and restart Anki using the launcher."""
+    launcher = launcher_executable()
     assert launcher
 
-    _trigger_launcher_run()
+    trigger_launcher_run()
 
     with contextlib.suppress(ResourceWarning):
         env = os.environ.copy()
@@ -52,30 +111,6 @@ def update_and_restart() -> None:
 
     mw.app.quit()
 
 
-def _trigger_launcher_run() -> None:
-    """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
-    try:
-        # Get the local data directory equivalent to Rust's dirs::data_local_dir()
-        if is_win:
-            from aqt.winpaths import get_local_appdata
-
-            data_dir = Path(get_local_appdata())
-        elif is_mac:
-            data_dir = Path.home() / "Library" / "Application Support"
-        else:  # Linux
-            data_dir = Path(
-                os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
-            )
-
-        pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
-
-        if pyproject_path.exists():
-            # Touch the file to update its mtime
-            pyproject_path.touch()
-    except Exception as e:
-        print(e)
-
-
 def confirm_then_upgrade():
     if not askUser("Change to a different Anki version?"):
         return
@@ -116,10 +151,18 @@ def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]:
     return cmd, env
 
 
+def on_addon_config():
+    showInfo(
+        "This add-on is automatically added when installing older Anki versions, so that they work with the launcher. You can remove it if you wish."
+    )
+
+
 def setup():
+    mw.addonManager.setConfigAction(__name__, on_addon_config)
+
     if pointVersion() >= 250600:
         return
-    if not have_launcher():
+    if not launcher_executable():
         return
 
     # Add action to tools menu
@@ -129,7 +172,21 @@ def setup():
 
     # Monkey-patch audio tools to use anki-audio
     if is_win or is_mac:
+        import aqt
+        import aqt.sound
+
         aqt.sound._packagedCmd = _packagedCmd
 
+    # Inject launcher functions into launcher module
+    import aqt.package
+
+    aqt.package.launcher_executable = launcher_executable
+    aqt.package.update_and_restart = update_and_restart
+    aqt.package.trigger_launcher_run = trigger_launcher_run
+    aqt.package.uv_binary = uv_binary
+    aqt.package.launcher_root = launcher_root
+    aqt.package.venv_binary = venv_binary
+    aqt.package.add_python_requirements = add_python_requirements
+
 
 setup()
diff --git a/qt/launcher/lin/build.sh b/qt/launcher/lin/build.sh
index de96a1b50..7bd78c27d 100755
--- a/qt/launcher/lin/build.sh
+++ b/qt/launcher/lin/build.sh
@@ -13,7 +13,7 @@ HOST_ARCH=$(uname -m)
 
 # Define output paths
 OUTPUT_DIR="../../../out/launcher"
-LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher"
+LAUNCHER_DIR="$OUTPUT_DIR/anki-linux"
 
 # Clean existing output directory
 rm -rf "$LAUNCHER_DIR"
@@ -61,6 +61,7 @@ done
 # Copy additional files from parent directory
 cp ../pyproject.toml "$LAUNCHER_DIR/"
 cp ../../../.python-version "$LAUNCHER_DIR/"
+cp ../versions.py "$LAUNCHER_DIR/"
 
 # Set executable permissions
 chmod +x \
@@ -75,10 +76,9 @@ chmod +x \
 
 # Set proper permissions and create tarball
 chmod -R a+r "$LAUNCHER_DIR"
 
-# Create tarball using the same options as the Rust template
 ZSTD="zstd -c --long -T0 -18"
-TRANSFORM="s%^.%anki-launcher%S"
-TARBALL="$OUTPUT_DIR/anki-launcher.tar.zst"
+TRANSFORM="s%^.%anki-linux%S"
+TARBALL="$OUTPUT_DIR/anki-linux.tar.zst"
 
 tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" .
diff --git a/qt/launcher/mac/Info.plist b/qt/launcher/mac/Info.plist
index 59b67605f..ac0ab2f09 100644
--- a/qt/launcher/mac/Info.plist
+++ b/qt/launcher/mac/Info.plist
@@ -7,7 +7,9 @@
   <key>CFBundleShortVersionString</key>
   <string>1.0</string>
   <key>LSMinimumSystemVersion</key>
-  <string>11</string>
+  <string>12</string>
+  <key>LSApplicationCategoryType</key>
+  <string>public.app-category.education</string>
   <key>CFBundleDocumentTypes</key>
   <array>
     <dict>
diff --git a/qt/launcher/mac/build.sh b/qt/launcher/mac/build.sh
index 0ec39ad8f..470b5cd25 100755
--- a/qt/launcher/mac/build.sh
+++ b/qt/launcher/mac/build.sh
@@ -35,6 +35,7 @@ cp Info.plist "$APP_LAUNCHER/Contents/"
 cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/"
 cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/"
 cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/"
+cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
 
 # Codesign
 for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
diff --git a/qt/launcher/pyproject.toml b/qt/launcher/pyproject.toml
index 2a45626c7..cc521b432 100644
--- a/qt/launcher/pyproject.toml
+++ b/qt/launcher/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "anki-launcher"
-version = "0.1.0"
+version = "1.0.0"
 description = "UV-based launcher for Anki."
">
 requires-python = ">=3.9"
 dependencies = [
diff --git a/qt/launcher/src/bin/build_win.rs b/qt/launcher/src/bin/build_win.rs
index fc9082bf2..96688f190 100644
--- a/qt/launcher/src/bin/build_win.rs
+++ b/qt/launcher/src/bin/build_win.rs
@@ -139,6 +139,9 @@ fn copy_files(output_dir: &Path) -> Result<()> {
         output_dir.join(".python-version"),
     )?;
 
+    // Copy versions.py
+    copy_file("../versions.py", output_dir.join("versions.py"))?;
+
     Ok(())
 }
diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs
index c8cd7e052..5679f8f71 100644
--- a/qt/launcher/src/main.rs
+++ b/qt/launcher/src/main.rs
@@ -22,6 +22,7 @@ use anki_process::CommandExt as AnkiCommandExt;
 use anyhow::Context;
 use anyhow::Result;
 
+use crate::platform::ensure_os_supported;
 use crate::platform::ensure_terminal_shown;
 use crate::platform::get_exe_and_resources_dirs;
 use crate::platform::get_uv_binary_name;
@@ -46,6 +47,7 @@ struct State {
     uv_lock_path: std::path::PathBuf,
     sync_complete_marker: std::path::PathBuf,
     previous_version: Option<String>,
+    resources_dir: std::path::PathBuf,
 }
 
 #[derive(Debug, Clone)]
@@ -54,6 +56,12 @@ pub enum VersionKind {
     Uv(String),
 }
 
+#[derive(Debug)]
+pub struct Releases {
+    pub latest: Vec<String>,
+    pub all: Vec<String>,
+}
+
 #[derive(Debug, Clone)]
 pub enum MainMenuChoice {
     Latest,
@@ -99,6 +107,7 @@ fn run() -> Result<()> {
         uv_lock_path: uv_install_root.join("uv.lock"),
         sync_complete_marker: uv_install_root.join(".sync_complete"),
         previous_version: None,
+        resources_dir,
     };
 
     // Check for uninstall request from Windows uninstaller
@@ -110,12 +119,6 @@ fn run() -> Result<()> {
     // Create install directory and copy project files in
     create_dir_all(&state.uv_install_root)?;
 
-    let had_user_pyproj = state.user_pyproject_path.exists();
-    if !had_user_pyproj {
-        // during initial launcher testing, enable betas by default
-        write_file(&state.prerelease_marker, "")?;
-    }
-
     copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?;
     copy_if_newer(
         &state.dist_python_version_path,
@@ -132,7 +135,7 @@ fn run() -> Result<()> {
     if !pyproject_has_changed {
         // If venv is already up to date, launch Anki normally
         let args: Vec<String> = std::env::args().skip(1).collect();
-        let cmd = build_python_command(&state.uv_install_root, &args)?;
+        let cmd = build_python_command(&state, &args)?;
         launch_anki_normally(cmd)?;
         return Ok(());
     }
@@ -143,16 +146,23 @@ fn run() -> Result<()> {
     print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
     println!("\x1B[1mAnki Launcher\x1B[0m\n");
 
+    ensure_os_supported()?;
+
     check_versions(&mut state);
 
-    main_menu_loop(&state)?;
+    let first_run = !state.uv_install_root.join(".venv").exists();
+    if first_run {
+        handle_version_install_or_update(&state, MainMenuChoice::Latest)?;
+    } else {
+        main_menu_loop(&state)?;
+    }
 
     // Write marker file to indicate we've completed the sync process
     write_sync_marker(&state.sync_complete_marker)?;
 
     #[cfg(target_os = "macos")]
     {
-        let cmd = build_python_command(&state.uv_install_root, &[])?;
+        let cmd = build_python_command(&state, &[])?;
         platform::mac::prepare_for_launch_after_update(cmd, &uv_install_root)?;
     }
 
@@ -225,9 +235,90 @@ fn check_versions(state: &mut State) {
     }
 }
 
+fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Result<()> {
+    update_pyproject_for_version(choice.clone(), state)?;
+
+    // Extract current version before syncing (but don't write to file yet)
+    let previous_version_to_save = extract_aqt_version(&state.uv_path, &state.uv_install_root);
+
+    // Remove sync marker before attempting sync
+    let _ = remove_file(&state.sync_complete_marker);
+
+    println!("\x1B[1mUpdating Anki...\x1B[0m\n");
+
+    let python_version_trimmed = if state.user_python_version_path.exists() {
+        let python_version = read_file(&state.user_python_version_path)?;
+        let python_version_str =
+            String::from_utf8(python_version).context("Invalid UTF-8 in .python-version")?;
+        Some(python_version_str.trim().to_string())
+    } else {
+        None
+    };
+
+    // `uv sync` sometimes does not pull in Python automatically
+    // This might be system/platform specific and/or a uv bug.
+    let mut command = Command::new(&state.uv_path);
+    command
+        .current_dir(&state.uv_install_root)
+        .env("UV_CACHE_DIR", &state.uv_cache_dir)
+        .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
+        .args(["python", "install", "--managed-python"]);
+
+    // Add python version if .python-version file exists
+    if let Some(version) = &python_version_trimmed {
+        command.args([version]);
+    }
+
+    command.ensure_success().context("Python install failed")?;
+
+    // Sync the venv
+    let mut command = Command::new(&state.uv_path);
+    command
+        .current_dir(&state.uv_install_root)
+        .env("UV_CACHE_DIR", &state.uv_cache_dir)
+        .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
+        .args(["sync", "--upgrade", "--managed-python"]);
+
+    // Add python version if .python-version file exists
+    if let Some(version) = &python_version_trimmed {
+        command.args(["--python", version]);
+    }
+
+    if state.no_cache_marker.exists() {
+        command.env("UV_NO_CACHE", "1");
+    }
+
+    match command.ensure_success() {
+        Ok(_) => {
+            // Sync succeeded
+            if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) {
+                inject_helper_addon(&state.uv_install_root)?;
+            }
+
+            // Now that sync succeeded, save the previous version
+            if let Some(current_version) = previous_version_to_save {
+                let previous_version_path = state.uv_install_root.join("previous-version");
+                if let Err(e) = write_file(&previous_version_path, &current_version) {
+                    println!("Warning: Could not save previous version: {e}");
+                }
+            }
+
+            Ok(())
+        }
+        Err(e) => {
+            // If sync fails due to things like a missing wheel on pypi,
+            // we need to remove the lockfile or uv will cache the bad result.
+            let _ = remove_file(&state.uv_lock_path);
+            println!("Install failed: {e:#}");
+            println!();
+            Err(e.into())
+        }
+    }
+}
+
 fn main_menu_loop(state: &State) -> Result<()> {
     loop {
-        let menu_choice = get_main_menu_choice(state);
+        let menu_choice = get_main_menu_choice(state)?;
 
         match menu_choice {
             MainMenuChoice::Quit => std::process::exit(0),
@@ -270,77 +361,10 @@ fn main_menu_loop(state: &State) -> Result<()> {
                 continue;
             }
             choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => {
-                // For other choices, update project files and sync
-                update_pyproject_for_version(
-                    choice.clone(),
-                    state.dist_pyproject_path.clone(),
-                    state.user_pyproject_path.clone(),
-                    state.dist_python_version_path.clone(),
-                    state.user_python_version_path.clone(),
-                )?;
-
-                // Extract current version before syncing (but don't write to file yet)
-                let previous_version_to_save =
-                    extract_aqt_version(&state.uv_path, &state.uv_install_root);
-
-                // Remove sync marker before attempting sync
-                let _ = remove_file(&state.sync_complete_marker);
-
-                // Sync the venv
-                let mut command = Command::new(&state.uv_path);
-                command
-                    .current_dir(&state.uv_install_root)
-                    .env("UV_CACHE_DIR", &state.uv_cache_dir)
-                    .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
-                    .args(["sync", "--upgrade", "--managed-python"]);
-
-                // Add python version if .python-version file exists
-                if state.user_python_version_path.exists() {
-                    let python_version = read_file(&state.user_python_version_path)?;
-                    let python_version_str = String::from_utf8(python_version)
-                        .context("Invalid UTF-8 in .python-version")?;
-                    let python_version_trimmed = python_version_str.trim();
-                    command.args(["--python", python_version_trimmed]);
-                }
-
-                // Set UV_PRERELEASE=allow if beta mode is enabled
-                if state.prerelease_marker.exists() {
-                    command.env("UV_PRERELEASE", "allow");
-                }
-
-                if state.no_cache_marker.exists() {
-                    command.env("UV_NO_CACHE", "1");
-                }
-
-                println!("\x1B[1mUpdating Anki...\x1B[0m\n");
-
-                match command.ensure_success() {
-                    Ok(_) => {
-                        // Sync succeeded
-                        if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) {
-                            inject_helper_addon(&state.uv_install_root)?;
-                        }
-
-                        // Now that sync succeeded, save the previous version
-                        if let Some(current_version) = previous_version_to_save {
-                            let previous_version_path =
-                                state.uv_install_root.join("previous-version");
-                            if let Err(e) = write_file(&previous_version_path, &current_version) {
-                                println!("Warning: Could not save previous version: {e}");
-                            }
-                        }
-
-                        break;
-                    }
-                    Err(e) => {
-                        // If sync fails due to things like a missing wheel on pypi,
-                        // we need to remove the lockfile or uv will cache the bad result.
- let _ = remove_file(&state.uv_lock_path); - println!("Install failed: {e:#}"); - println!(); - continue; - } + if handle_version_install_or_update(state, choice.clone()).is_err() { + continue; } + break; } } } @@ -356,16 +380,18 @@ fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> { Ok(()) } -fn get_main_menu_choice(state: &State) -> MainMenuChoice { +fn get_main_menu_choice(state: &State) -> Result { loop { - println!("1) Latest Anki (just press enter)"); + println!("1) Latest Anki (press Enter)"); println!("2) Choose a version"); if let Some(current_version) = &state.current_version { - println!("3) Keep existing version ({current_version})"); + let normalized_current = normalize_version(current_version); + println!("3) Keep existing version ({normalized_current})"); } if let Some(prev_version) = &state.previous_version { if state.current_version.as_ref() != Some(prev_version) { - println!("4) Revert to previous version ({prev_version})"); + let normalized_prev = normalize_version(prev_version); + println!("4) Revert to previous version ({normalized_prev})"); } } println!(); @@ -392,9 +418,14 @@ fn get_main_menu_choice(state: &State) -> MainMenuChoice { println!(); - return match input { + return Ok(match input { "" | "1" => MainMenuChoice::Latest, - "2" => MainMenuChoice::Version(get_version_kind()), + "2" => { + match get_version_kind(state)? { + Some(version_kind) => MainMenuChoice::Version(version_kind), + None => continue, // Return to main menu + } + } "3" => { if state.current_version.is_some() { MainMenuChoice::KeepExisting @@ -407,7 +438,7 @@ fn get_main_menu_choice(state: &State) -> MainMenuChoice { if let Some(prev_version) = &state.previous_version { if state.current_version.as_ref() != Some(prev_version) { if let Some(version_kind) = parse_version_kind(prev_version) { - return MainMenuChoice::Version(version_kind); + return Ok(MainMenuChoice::Version(version_kind)); } } } @@ -422,51 +453,243 @@ fn get_main_menu_choice(state: &State) -> MainMenuChoice { println!("Invalid input. Please try again."); continue; } - }; + }); } } -fn get_version_kind() -> VersionKind { - loop { - println!("Enter the version you want to install:"); - print!("> "); - let _ = stdout().flush(); +fn get_version_kind(state: &State) -> Result> { + println!("Please wait..."); - let mut input = String::new(); - let _ = stdin().read_line(&mut input); - let input = input.trim(); + let releases = get_releases(state)?; + let releases_str = releases + .latest + .iter() + .map(|v| v.as_str()) + .collect::>() + .join(", "); + println!("Latest releases: {releases_str}"); - if input.is_empty() { - println!("Please enter a version."); - continue; + println!("Enter the version you want to install:"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim(); + + if input.is_empty() { + return Ok(None); + } + + // Normalize the input version for comparison + let normalized_input = normalize_version(input); + + // Check if the version exists in the available versions + let version_exists = releases.all.iter().any(|v| v == &normalized_input); + + match (parse_version_kind(input), version_exists) { + (Some(version_kind), true) => { + println!(); + Ok(Some(version_kind)) } - - match parse_version_kind(input) { - Some(version_kind) => { - println!(); - return version_kind; - } - None => { - println!("Invalid version format. 
Please enter a version like 24.10 or 25.06.1 (minimum 2.1.50)"); - continue; - } + (None, true) => { + println!("Versions before 2.1.50 can't be installed."); + Ok(None) + } + _ => { + println!("Invalid version.\n"); + Ok(None) } } } -fn update_pyproject_for_version( - menu_choice: MainMenuChoice, - dist_pyproject_path: std::path::PathBuf, - user_pyproject_path: std::path::PathBuf, - dist_python_version_path: std::path::PathBuf, - user_python_version_path: std::path::PathBuf, -) -> Result<()> { +fn with_only_latest_patch(versions: &[String]) -> Vec { + // Only show the latest patch release for a given (major, minor) + let mut seen_major_minor = std::collections::HashSet::new(); + versions + .iter() + .filter(|v| { + let (major, minor, _, _) = parse_version_for_filtering(v); + if major == 2 { + return true; + } + let major_minor = (major, minor); + if seen_major_minor.contains(&major_minor) { + false + } else { + seen_major_minor.insert(major_minor); + true + } + }) + .cloned() + .collect() +} + +fn parse_version_for_filtering(version_str: &str) -> (u32, u32, u32, bool) { + // Remove any build metadata after + + let version_str = version_str.split('+').next().unwrap_or(version_str); + + // Check for prerelease markers + let is_prerelease = ["a", "b", "rc", "alpha", "beta"] + .iter() + .any(|marker| version_str.to_lowercase().contains(marker)); + + // Extract numeric parts (stop at first non-digit/non-dot character) + let numeric_end = version_str + .find(|c: char| !c.is_ascii_digit() && c != '.') + .unwrap_or(version_str.len()); + let numeric_part = &version_str[..numeric_end]; + + let parts: Vec<&str> = numeric_part.split('.').collect(); + + let major = parts.first().and_then(|s| s.parse().ok()).unwrap_or(0); + let minor = parts.get(1).and_then(|s| s.parse().ok()).unwrap_or(0); + let patch = parts.get(2).and_then(|s| s.parse().ok()).unwrap_or(0); + + (major, minor, patch, is_prerelease) +} + +fn normalize_version(version: &str) -> String { + let (major, minor, patch, _is_prerelease) = parse_version_for_filtering(version); + + if major <= 2 { + // Don't transform versions <= 2.x + return version.to_string(); + } + + // For versions > 2, pad the minor version with leading zero if < 10 + let normalized_minor = if minor < 10 { + format!("0{minor}") + } else { + minor.to_string() + }; + + // Find any prerelease suffix + let mut prerelease_suffix = ""; + + // Look for prerelease markers after the numeric part + let numeric_end = version + .find(|c: char| !c.is_ascii_digit() && c != '.') + .unwrap_or(version.len()); + if numeric_end < version.len() { + let suffix_part = &version[numeric_end..]; + let suffix_lower = suffix_part.to_lowercase(); + + for marker in ["alpha", "beta", "rc", "a", "b"] { + if suffix_lower.starts_with(marker) { + prerelease_suffix = &version[numeric_end..]; + break; + } + } + } + + // Reconstruct the version + if version.matches('.').count() >= 2 { + format!("{major}.{normalized_minor}.{patch}{prerelease_suffix}") + } else { + format!("{major}.{normalized_minor}{prerelease_suffix}") + } +} + +fn filter_and_normalize_versions( + all_versions: Vec, + include_prereleases: bool, +) -> Vec { + let mut valid_versions: Vec = all_versions + .into_iter() + .map(|v| normalize_version(&v)) + .collect(); + + // Reverse to get chronological order (newest first) + valid_versions.reverse(); + + if !include_prereleases { + valid_versions.retain(|v| { + let (_, _, _, is_prerelease) = parse_version_for_filtering(v); + !is_prerelease + }); + } + + valid_versions +} + +fn 
+fn fetch_versions(state: &State) -> Result<Vec<String>> {
+    let versions_script = state.resources_dir.join("versions.py");
+
+    let mut cmd = Command::new(&state.uv_path);
+    cmd.current_dir(&state.uv_install_root)
+        .args(["run", "--no-project"])
+        .arg(&versions_script);
+
+    let output = cmd.utf8_output()?;
+    let versions = serde_json::from_str(&output.stdout).context("Failed to parse versions JSON")?;
+    Ok(versions)
+}
+
+fn get_releases(state: &State) -> Result<Releases> {
+    let include_prereleases = state.prerelease_marker.exists();
+    let all_versions = fetch_versions(state)?;
+    let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
+
+    let latest_patches = with_only_latest_patch(&all_versions);
+    let latest_releases: Vec<String> = latest_patches.into_iter().take(5).collect();
+    Ok(Releases {
+        latest: latest_releases,
+        all: all_versions,
+    })
+}
+
+fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
+    let content = read_file(&state.dist_pyproject_path)?;
+    let content_str = String::from_utf8(content).context("Invalid UTF-8 in pyproject.toml")?;
+    let updated_content = match version_kind {
+        VersionKind::PyOxidizer(version) => {
+            // Replace package name and add PyQt6 dependencies
+            content_str.replace(
+                "anki-release",
+                &format!(
+                    concat!(
+                        "aqt[qt6]=={}\",\n",
+                        " \"anki-audio==0.1.0; sys.platform == 'win32' or sys.platform == 'darwin'\",\n",
+                        " \"pyqt6==6.6.1\",\n",
+                        " \"pyqt6-qt6==6.6.2\",\n",
+                        " \"pyqt6-webengine==6.6.0\",\n",
+                        " \"pyqt6-webengine-qt6==6.6.2\",\n",
+                        " \"pyqt6_sip==13.6.0"
+                    ),
+                    version
+                ),
+            )
+        }
+        VersionKind::Uv(version) => content_str.replace(
+            "anki-release",
+            &format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
+        ),
+    };
+    write_file(&state.user_pyproject_path, &updated_content)?;
+
+    // Update .python-version based on version kind
+    match version_kind {
+        VersionKind::PyOxidizer(_) => {
+            write_file(&state.user_python_version_path, "3.9")?;
+        }
+        VersionKind::Uv(_) => {
+            copy_file(
+                &state.dist_python_version_path,
+                &state.user_python_version_path,
+            )?;
+        }
+    }
+    Ok(())
+}
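For the VersionKind::Uv branch above, the string replacement expands a bare "anki-release" entry in the distributed pyproject.toml into three pinned packages. A minimal sketch of that rewrite on a hypothetical one-line template:

    let template = r#"dependencies = ["anki-release"]"#;
    let version = "25.07.1";
    let updated = template.replace(
        "anki-release",
        &format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
    );
    assert_eq!(
        updated,
        "dependencies = [\"anki-release==25.07.1\",\n \"anki==25.07.1\",\n \"aqt==25.07.1\"]"
    );
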
- " \"pyqt6_sip==13.6.0" - ), - version - ), - ) - } - VersionKind::Uv(version) => { - content_str.replace("anki-release", &format!("anki-release=={version}")) - } - }; - write_file(&user_pyproject_path, &updated_content)?; - - // Update .python-version based on version kind - match &version_kind { - VersionKind::PyOxidizer(_) => { - write_file(&user_python_version_path, "3.9")?; - } - VersionKind::Uv(_) => { - copy_file(&dist_python_version_path, &user_python_version_path)?; - } - } + apply_version_kind(&version_kind, state)?; } MainMenuChoice::Quit => { std::process::exit(0); @@ -671,24 +858,54 @@ fn handle_uninstall(state: &State) -> Result { Ok(true) } -fn build_python_command(uv_install_root: &std::path::Path, args: &[String]) -> Result { +fn build_python_command(state: &State, args: &[String]) -> Result { let python_exe = if cfg!(target_os = "windows") { let show_console = std::env::var("ANKI_CONSOLE").is_ok(); if show_console { - uv_install_root.join(".venv/Scripts/python.exe") + state.uv_install_root.join(".venv/Scripts/python.exe") } else { - uv_install_root.join(".venv/Scripts/pythonw.exe") + state.uv_install_root.join(".venv/Scripts/pythonw.exe") } } else { - uv_install_root.join(".venv/bin/python") + state.uv_install_root.join(".venv/bin/python") }; - let mut cmd = Command::new(python_exe); + let mut cmd = Command::new(&python_exe); cmd.args(["-c", "import aqt, sys; sys.argv[0] = 'Anki'; aqt.run()"]); cmd.args(args); // tell the Python code it was invoked by the launcher, and updating is // available cmd.env("ANKI_LAUNCHER", std::env::current_exe()?.utf8()?.as_str()); + // Set UV and Python paths for the Python code + cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str()); + cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str()); + Ok(cmd) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_normalize_version() { + // Test versions <= 2.x (should not be transformed) + assert_eq!(normalize_version("2.1.50"), "2.1.50"); + + // Test basic versions > 2 with zero-padding + assert_eq!(normalize_version("25.7"), "25.07"); + assert_eq!(normalize_version("25.07"), "25.07"); + assert_eq!(normalize_version("25.10"), "25.10"); + assert_eq!(normalize_version("24.6.1"), "24.06.1"); + assert_eq!(normalize_version("24.06.1"), "24.06.1"); + + // Test prerelease versions + assert_eq!(normalize_version("25.7a1"), "25.07a1"); + assert_eq!(normalize_version("25.7.1a1"), "25.07.1a1"); + + // Test versions with patch = 0 + assert_eq!(normalize_version("25.7.0"), "25.07.0"); + assert_eq!(normalize_version("25.7.0a1"), "25.07.0a1"); + } +} diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs index bbb42df10..50a303656 100644 --- a/qt/launcher/src/platform/mod.rs +++ b/qt/launcher/src/platform/mod.rs @@ -128,3 +128,10 @@ pub fn ensure_terminal_shown() -> Result<()> { print!("\x1b]2;Anki Launcher\x07"); Ok(()) } + +pub fn ensure_os_supported() -> Result<()> { + #[cfg(all(unix, not(target_os = "macos")))] + unix::ensure_glibc_supported()?; + + Ok(()) +} diff --git a/qt/launcher/src/platform/unix.rs b/qt/launcher/src/platform/unix.rs index f37ec81eb..5e4bddda3 100644 --- a/qt/launcher/src/platform/unix.rs +++ b/qt/launcher/src/platform/unix.rs @@ -9,15 +9,22 @@ use anyhow::Result; pub fn relaunch_in_terminal() -> Result<()> { let current_exe = std::env::current_exe().context("Failed to get current executable path")?; - // Try terminals in order of preference + // Try terminals in roughly most specific to least specific. 
+    // First, try commonly used terminals for riced systems.
+    // Second, try common defaults.
+    // Finally, try x11 compatibility terminals.
     let terminals = [
-        ("x-terminal-emulator", vec!["-e"]),
-        ("gnome-terminal", vec!["--"]),
-        ("konsole", vec!["-e"]),
-        ("xfce4-terminal", vec!["-e"]),
+        // commonly used for riced systems
         ("alacritty", vec!["-e"]),
         ("kitty", vec![]),
         ("foot", vec![]),
+        // the user's default terminal in Debian/Ubuntu
+        ("x-terminal-emulator", vec!["-e"]),
+        // default installs for the most common distros
+        ("xfce4-terminal", vec!["-e"]),
+        ("gnome-terminal", vec!["-e"]),
+        ("konsole", vec!["-e"]),
+        // x11-compatibility terminals
         ("urxvt", vec!["-e"]),
         ("xterm", vec!["-e"]),
     ];
@@ -65,3 +72,34 @@ pub fn finalize_uninstall() {
     let mut input = String::new();
     let _ = stdin().read_line(&mut input);
 }
+
+pub fn ensure_glibc_supported() -> Result<()> {
+    use std::ffi::CStr;
+    let get_glibc_version = || -> Option<(u32, u32)> {
+        let version_ptr = unsafe { libc::gnu_get_libc_version() };
+        if version_ptr.is_null() {
+            return None;
+        }
+
+        let version_cstr = unsafe { CStr::from_ptr(version_ptr) };
+        let version_str = version_cstr.to_str().ok()?;
+
+        // Parse version string (format: "2.36" or "2.36.1")
+        let version_parts: Vec<&str> = version_str.split('.').collect();
+        if version_parts.len() < 2 {
+            return None;
+        }
+
+        let major: u32 = version_parts[0].parse().ok()?;
+        let minor: u32 = version_parts[1].parse().ok()?;
+
+        Some((major, minor))
+    };
+
+    let (major, minor) = get_glibc_version().unwrap_or_default();
+    if major < 2 || (major == 2 && minor < 36) {
+        anyhow::bail!("Anki requires a modern Linux distro with glibc 2.36 or later.");
+    }
+
+    Ok(())
+}
diff --git a/qt/launcher/versions.py b/qt/launcher/versions.py
new file mode 100644
index 000000000..02e16ba69
--- /dev/null
+++ b/qt/launcher/versions.py
@@ -0,0 +1,39 @@
+# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+import json
+import sys
+import urllib.request
+
+
+def main():
+    """Fetch and return all versions from PyPI, sorted by upload time."""
+    url = "https://pypi.org/pypi/aqt/json"
+
+    try:
+        with urllib.request.urlopen(url, timeout=30) as response:
+            data = json.loads(response.read().decode("utf-8"))
+        releases = data.get("releases", {})
+
+        # Create list of (version, upload_time) tuples
+        version_times = []
+        for version, files in releases.items():
+            if files:  # Only include versions that have files
+                # Use the upload time of the first file for each version
+                upload_time = files[0].get("upload_time_iso_8601")
+                if upload_time:
+                    version_times.append((version, upload_time))
+
+        # Sort by upload time
+        version_times.sort(key=lambda x: x[1])
+
+        # Extract just the version names
+        versions = [version for version, _ in version_times]
+        print(json.dumps(versions))
+    except Exception as e:
+        print(f"Error fetching versions: {e}", file=sys.stderr)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
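The script prints a single JSON array of version strings (oldest first) to stdout; fetch_versions() above deserializes it with serde_json. A sketch of the consuming side, with made-up values:

    let stdout = r#"["2.1.50", "25.06", "25.07.1"]"#;
    let versions: Vec<String> =
        serde_json::from_str(stdout).expect("versions.py emits a JSON list");
    assert_eq!(versions.last().map(|s| s.as_str()), Some("25.07.1"));
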
extra = "qt68" }, ], ] @@ -72,9 +72,12 @@ build-backend = "hatchling.build" [project.scripts] anki = "aqt:run" +[project.gui-scripts] +ankiw = "aqt:run" + [tool.hatch.build.targets.wheel] packages = ["aqt"] -exclude = ["**/*.scss", "**/*.ui"] +exclude = ["aqt/data", "**/*.ui"] [tool.hatch.version] source = "code" diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml index d3c7e215f..a1d24cc87 100644 --- a/rslib/Cargo.toml +++ b/rslib/Cargo.toml @@ -48,6 +48,7 @@ async-trait.workspace = true axum.workspace = true axum-client-ip.workspace = true axum-extra.workspace = true +bitflags.workspace = true blake3.workspace = true bytes.workspace = true chrono.workspace = true diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index 349f2d9af..b6e81ce2a 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -39,6 +39,7 @@ impl From for BoolKey { BoolKeyProto::RenderLatex => BoolKey::RenderLatex, BoolKeyProto::LoadBalancerEnabled => BoolKey::LoadBalancerEnabled, BoolKeyProto::FsrsShortTermWithStepsEnabled => BoolKey::FsrsShortTermWithStepsEnabled, + BoolKeyProto::FsrsLegacyEvaluate => BoolKey::FsrsLegacyEvaluate, } } } diff --git a/rslib/src/browser_table.rs b/rslib/src/browser_table.rs index 85b0572d3..c297f2bac 100644 --- a/rslib/src/browser_table.rs +++ b/rslib/src/browser_table.rs @@ -128,7 +128,9 @@ impl Card { /// This uses card.due and card.ivl to infer the elapsed time. If 'set due /// date' or an add-on has changed the due date, this won't be accurate. pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option { - if !self.is_due_in_days() { + if let Some(last_review_time) = self.last_review_time { + Some(timing.next_day_at.elapsed_days_since(last_review_time) as u32) + } else if !self.is_due_in_days() { Some( (timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32) / 86_400, diff --git a/rslib/src/card/mod.rs b/rslib/src/card/mod.rs index 598ac602b..b6b9ce807 100644 --- a/rslib/src/card/mod.rs +++ b/rslib/src/card/mod.rs @@ -96,6 +96,7 @@ pub struct Card { pub(crate) memory_state: Option, pub(crate) desired_retention: Option, pub(crate) decay: Option, + pub(crate) last_review_time: Option, /// JSON object or empty; exposed through the reviewer for persisting custom /// state pub(crate) custom_data: String, @@ -147,6 +148,7 @@ impl Default for Card { memory_state: None, desired_retention: None, decay: None, + last_review_time: None, custom_data: String::new(), } } diff --git a/rslib/src/card/service.rs b/rslib/src/card/service.rs index 8f1421f25..cc3fc6b05 100644 --- a/rslib/src/card/service.rs +++ b/rslib/src/card/service.rs @@ -107,6 +107,7 @@ impl TryFrom for Card { memory_state: c.memory_state.map(Into::into), desired_retention: c.desired_retention, decay: c.decay, + last_review_time: c.last_review_time_secs.map(TimestampSecs), custom_data: c.custom_data, }) } @@ -136,6 +137,7 @@ impl From for anki_proto::cards::Card { memory_state: c.memory_state.map(Into::into), desired_retention: c.desired_retention, decay: c.decay, + last_review_time_secs: c.last_review_time.map(|t| t.0), custom_data: c.custom_data, } } diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index 208a2f4ed..02919dc12 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -25,6 +25,9 @@ use crate::latex::contains_latex; use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; +static CLOZE: LazyLock = + LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap()); + static MATHJAX: 
diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs
index 208a2f4ed..02919dc12 100644
--- a/rslib/src/cloze.rs
+++ b/rslib/src/cloze.rs
@@ -25,6 +25,9 @@ use crate::latex::contains_latex;
 use crate::template::RenderContext;
 use crate::text::strip_html_preserving_entities;
 
+static CLOZE: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
+
 static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
     Regex::new(
         r"(?xsi)
@@ -453,6 +456,10 @@ pub fn cloze_number_in_fields(fields: impl IntoIterator<Item = impl AsRef<str>>) -> HashSet<u16> {
     set
 }
 
+pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
+    CLOZE.replace_all(text, "$1")
+}
+
 fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
     MATHJAX.replace_all(text, |caps: &Captures| -> String {
         format!(
@@ -610,6 +617,16 @@ mod test {
         );
     }
 
+    #[test]
+    fn strip_clozes_regex() {
+        assert_eq!(
+            strip_clozes(
+                r#"The {{c1::moon::🌛}} {{c2::orbits::this hint has "::" in it}} the {{c3::🌏}}."#
+            ),
+            "The moon orbits the 🌏."
+        );
+    }
+
     #[test]
     fn mathjax_html() {
         // escaped angle brackets should be preserved
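The CLOZE pattern can be exercised on its own: the (?s) flag makes "." span newlines, and replacing with "$1" keeps the answer text while dropping the optional ::hint capture group. A standalone sketch using the regex crate:

    use regex::Regex;

    fn main() {
        let cloze = Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap();
        let text = "The {{c1::moon::🌛}} {{c2::orbits}} the earth.";
        assert_eq!(cloze.replace_all(text, "$1"), "The moon orbits the earth.");
    }
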
diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs
index 39273b931..c76787cb0 100644
--- a/rslib/src/config/bool.rs
+++ b/rslib/src/config/bool.rs
@@ -41,6 +41,7 @@ pub enum BoolKey {
     WithDeckConfigs,
     Fsrs,
     FsrsHealthCheck,
+    FsrsLegacyEvaluate,
     LoadBalancerEnabled,
     FsrsShortTermWithStepsEnabled,
     #[strum(to_string = "normalize_note_text")]
diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs
index 128e43770..9eb3b595f 100644
--- a/rslib/src/deckconfig/update.rs
+++ b/rslib/src/deckconfig/update.rs
@@ -74,6 +74,7 @@ impl Collection {
             apply_all_parent_limits: self.get_config_bool(BoolKey::ApplyAllParentLimits),
             fsrs: self.get_config_bool(BoolKey::Fsrs),
             fsrs_health_check: self.get_config_bool(BoolKey::FsrsHealthCheck),
+            fsrs_legacy_evaluate: self.get_config_bool(BoolKey::FsrsLegacyEvaluate),
             days_since_last_fsrs_optimize,
         })
     }
diff --git a/rslib/src/decks/remove.rs b/rslib/src/decks/remove.rs
index befb770f8..a3bc78209 100644
--- a/rslib/src/decks/remove.rs
+++ b/rslib/src/decks/remove.rs
@@ -28,7 +28,7 @@ impl Collection {
         let card_count = match deck.kind {
             DeckKind::Normal(_) => self.delete_all_cards_in_normal_deck(deck.id)?,
             DeckKind::Filtered(_) => {
-                self.return_all_cards_in_filtered_deck(deck.id)?;
+                self.return_all_cards_in_filtered_deck(deck)?;
                 0
             }
         };
diff --git a/rslib/src/import_export/text/csv/import.rs b/rslib/src/import_export/text/csv/import.rs
index f8422f6bd..e45bbca1b 100644
--- a/rslib/src/import_export/text/csv/import.rs
+++ b/rslib/src/import_export/text/csv/import.rs
@@ -61,6 +61,7 @@ impl CsvDeckExt for CsvDeck {
         match self {
             Self::DeckId(did) => NameOrId::Id(*did),
             Self::DeckColumn(_) => NameOrId::default(),
+            Self::DeckName(name) => NameOrId::Name(name.into()),
         }
     }
 
@@ -68,6 +69,7 @@
         match self {
             Self::DeckId(_) => None,
             Self::DeckColumn(column) => Some(*column as usize),
+            Self::DeckName(_) => None,
         }
     }
 }
diff --git a/rslib/src/import_export/text/csv/metadata.rs b/rslib/src/import_export/text/csv/metadata.rs
index 7e2f64f5e..d505c60d2 100644
--- a/rslib/src/import_export/text/csv/metadata.rs
+++ b/rslib/src/import_export/text/csv/metadata.rs
@@ -163,6 +163,8 @@ impl Collection {
             "deck" => {
                 if let Ok(Some(did)) = self.deck_id_by_name_or_id(&NameOrId::parse(value)) {
                     metadata.deck = Some(CsvDeck::DeckId(did.0));
+                } else if !value.is_empty() {
+                    metadata.deck = Some(CsvDeck::DeckName(value.to_string()));
                 }
             }
             "notetype column" => {
@@ -626,6 +628,7 @@ pub(in crate::import_export) mod test {
     pub trait CsvMetadataTestExt {
         fn defaults_for_testing() -> Self;
         fn unwrap_deck_id(&self) -> i64;
+        fn unwrap_deck_name(&self) -> &str;
         fn unwrap_notetype_id(&self) -> i64;
         fn unwrap_notetype_map(&self) -> &[u32];
     }
@@ -660,6 +663,13 @@
         }
     }
 
+        fn unwrap_deck_name(&self) -> &str {
+            match &self.deck {
+                Some(CsvDeck::DeckName(name)) => name,
+                _ => panic!("no deck name"),
+            }
+        }
+
         fn unwrap_notetype_id(&self) -> i64 {
             match self.notetype {
                 Some(CsvNotetype::GlobalNotetype(ref nt)) => nt.id,
@@ -683,8 +693,11 @@
             metadata!(col, format!("#deck:{deck_id}\n")).unwrap_deck_id(),
             deck_id
         );
+        // unknown deck
+        assert_eq!(metadata!(col, "#deck:foo\n").unwrap_deck_name(), "foo");
+        assert_eq!(metadata!(col, "#deck:1234\n").unwrap_deck_name(), "1234");
         // fallback
-        assert_eq!(metadata!(col, "#deck:foo\n").unwrap_deck_id(), 1);
+        assert_eq!(metadata!(col, "#deck:\n").unwrap_deck_id(), 1);
         assert_eq!(metadata!(col, "\n").unwrap_deck_id(), 1);
     }
 
@@ -726,8 +739,8 @@
             numeric_deck_2_id
         );
         assert_eq!(
-            metadata!(col, format!("#deck:1234\n")).unwrap_deck_id(),
-            1 // default deck
+            metadata!(col, format!("#deck:1234\n")).unwrap_deck_name(),
+            "1234"
         );
     }
diff --git a/rslib/src/import_export/text/import.rs b/rslib/src/import_export/text/import.rs
index f28c27ca3..202189eb6 100644
--- a/rslib/src/import_export/text/import.rs
+++ b/rslib/src/import_export/text/import.rs
@@ -147,7 +147,7 @@ impl Duplicate {
     }
 }
 
 impl DeckIdsByNameOrId {
-    fn new(col: &mut Collection, default: &NameOrId) -> Result<Self> {
+    fn new(col: &mut Collection, default: &NameOrId, usn: Usn) -> Result<Self> {
         let names: HashMap<UniCase<String>, DeckId> = col
             .get_all_normal_deck_names(false)?
             .into_iter()
@@ -160,6 +160,13 @@
             default: None,
         };
         new.default = new.get(default);
+        if new.default.is_none() && *default != NameOrId::default() {
+            let mut deck = Deck::new_normal();
+            deck.name = NativeDeckName::from_human_name(default.to_string());
+            col.add_deck_inner(&mut deck, usn)?;
+            new.insert(deck.id, deck.human_name());
+            new.default = Some(deck.id);
+        }
         Ok(new)
     }
 
@@ -193,7 +200,7 @@ impl<'a> Context<'a> {
             NameOrId::default(),
             col.notetype_by_name_or_id(&data.default_notetype)?,
         );
-        let deck_ids = DeckIdsByNameOrId::new(col, &data.default_deck)?;
+        let deck_ids = DeckIdsByNameOrId::new(col, &data.default_deck, usn)?;
         let existing_checksums = ExistingChecksums::new(col, data.match_scope)?;
         let existing_guids = col.storage.all_notes_by_guid()?;
 
@@ -274,6 +281,9 @@
             deck.name = NativeDeckName::from_human_name(name);
             self.col.add_deck_inner(&mut deck, self.usn)?;
             self.deck_ids.insert(deck.id, deck.human_name());
+            if name.is_empty() {
+                self.deck_ids.default = Some(deck.id);
+            }
             Some(deck.id)
         } else {
             None
diff --git a/rslib/src/import_export/text/mod.rs b/rslib/src/import_export/text/mod.rs
index e9e2da766..fdf94971a 100644
--- a/rslib/src/import_export/text/mod.rs
+++ b/rslib/src/import_export/text/mod.rs
@@ -83,6 +83,15 @@ impl From<String> for NameOrId {
     }
 }
 
+impl std::fmt::Display for NameOrId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            NameOrId::Id(did) => write!(f, "{did}"),
+            NameOrId::Name(name) => write!(f, "{name}"),
+        }
+    }
+}
+
 impl ForeignNote {
     pub(crate) fn into_log_note(self) -> LogNote {
         LogNote {
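The Display impl above exists so that a default deck supplied as either variant can be rendered back to a human name when DeckIdsByNameOrId::new() has to create it on the fly. A quick sketch of the two cases:

    let by_name = NameOrId::Name("Imported::French".into());
    let by_id = NameOrId::Id(1234);
    assert_eq!(by_name.to_string(), "Imported::French");
    assert_eq!(by_id.to_string(), "1234"); // an unknown id becomes a deck named "1234"
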
diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs
index ce6720d3d..bfe0eafaf 100644
--- a/rslib/src/scheduler/answering/mod.rs
+++ b/rslib/src/scheduler/answering/mod.rs
@@ -228,28 +228,31 @@ impl Collection {
     /// Return the next states that will be applied for each answer button.
     pub fn get_scheduling_states(&mut self, cid: CardId) -> Result<SchedulingStates> {
         let card = self.storage.get_card(cid)?.or_not_found(cid)?;
-        let deck = self.get_deck(card.deck_id)?.or_not_found(card.deck_id)?;
-
-        let note_id = deck
-            .config_id()
-            .map(|deck_config_id| self.get_deck_config(deck_config_id, false))
-            .transpose()?
-            .flatten()
-            .map(|deck_config| deck_config.inner.bury_reviews)
-            .unwrap_or(false)
-            .then_some(card.note_id);
+        let note_id = card.note_id;
         let ctx = self.card_state_updater(card)?;
         let current = ctx.current_card_state();
-        let load_balancer_ctx = self.state.card_queues.as_ref().and_then(|card_queues| {
-            match card_queues.load_balancer.as_ref() {
-                None => None,
-                Some(load_balancer) => {
-                    Some(load_balancer.review_context(note_id, deck.config_id()?))
-                }
+        let load_balancer_ctx = if let Some(load_balancer) = self
+            .state
+            .card_queues
+            .as_ref()
+            .and_then(|card_queues| card_queues.load_balancer.as_ref())
+        {
+            // Only get_deck_config when load balancer is enabled
+            if let Some(deck_config_id) = ctx.deck.config_id() {
+                let note_id = self
+                    .get_deck_config(deck_config_id, false)?
+                    .map(|deck_config| deck_config.inner.bury_reviews)
+                    .unwrap_or(false)
+                    .then_some(note_id);
+                Some(load_balancer.review_context(note_id, deck_config_id))
+            } else {
+                None
             }
-        });
+        } else {
+            None
+        };
 
         let state_ctx = ctx.state_context(load_balancer_ctx);
         Ok(current.next_states(&state_ctx))
@@ -334,7 +337,14 @@ impl Collection {
         self.update_deck_stats_from_answer(usn, answer, &updater, original.queue)?;
         self.maybe_bury_siblings(&original, &updater.config)?;
         let timing = updater.timing;
+        let deckconfig_id = updater.deck.config_id();
         let mut card = updater.into_card();
+        if !matches!(
+            answer.current_state,
+            CardState::Filtered(FilteredState::Preview(_))
+        ) {
+            card.last_review_time = Some(answer.answered_at.as_secs());
+        }
         if let Some(data) = answer.custom_data.take() {
             card.custom_data = data;
             card.validate_custom_data()?;
@@ -346,12 +356,14 @@ impl Collection {
         }
 
         if card.queue == CardQueue::Review {
-            let deck = self.get_deck(card.deck_id)?;
-            if let Some(card_queues) = self.state.card_queues.as_mut() {
-                if let Some(deckconfig_id) = deck.and_then(|deck| deck.config_id()) {
-                    if let Some(load_balancer) = card_queues.load_balancer.as_mut() {
-                        load_balancer.add_card(card.id, card.note_id, deckconfig_id, card.interval)
-                    }
+            if let Some(load_balancer) = self
+                .state
+                .card_queues
+                .as_mut()
+                .and_then(|card_queues| card_queues.load_balancer.as_mut())
+            {
+                if let Some(deckconfig_id) = deckconfig_id {
+                    load_balancer.add_card(card.id, card.note_id, deckconfig_id, card.interval)
                 }
             }
         }
@@ -451,11 +463,14 @@
             )?;
             card.set_memory_state(&fsrs, item, config.inner.historical_retention)?;
         }
-        let days_elapsed = self
-            .storage
-            .time_of_last_review(card.id)?
-            .map(|ts| timing.next_day_at.elapsed_days_since(ts))
-            .unwrap_or_default() as u32;
+        let days_elapsed = if let Some(last_review_time) = card.last_review_time {
+            timing.next_day_at.elapsed_days_since(last_review_time) as u32
+        } else {
+            self.storage
+                .time_of_last_review(card.id)?
+                .map(|ts| timing.next_day_at.elapsed_days_since(ts))
+                .unwrap_or_default() as u32
+        };
         Some(fsrs.next_states(
             card.memory_state.map(Into::into),
             config.inner.desired_retention,
diff --git a/rslib/src/scheduler/filtered/mod.rs b/rslib/src/scheduler/filtered/mod.rs
index ad7979e3c..331e54e5d 100644
--- a/rslib/src/scheduler/filtered/mod.rs
+++ b/rslib/src/scheduler/filtered/mod.rs
@@ -64,7 +64,8 @@ impl Collection {
     pub fn empty_filtered_deck(&mut self, did: DeckId) -> Result<OpOutput<()>> {
         self.transact(Op::EmptyFilteredDeck, |col| {
-            col.return_all_cards_in_filtered_deck(did)
+            let deck = col.get_deck(did)?.or_not_found(did)?;
+            col.return_all_cards_in_filtered_deck(&deck)
         })
     }
 
@@ -78,8 +79,11 @@
 }
 
 impl Collection {
-    pub(crate) fn return_all_cards_in_filtered_deck(&mut self, did: DeckId) -> Result<()> {
-        let cids = self.storage.all_cards_in_single_deck(did)?;
+    pub(crate) fn return_all_cards_in_filtered_deck(&mut self, deck: &Deck) -> Result<()> {
+        if !deck.is_filtered() {
+            return Err(FilteredDeckError::FilteredDeckRequired.into());
+        }
+        let cids = self.storage.all_cards_in_single_deck(deck.id)?;
         self.return_cards_to_home_deck(&cids)
     }
 
@@ -195,7 +199,7 @@
             timing,
         };
 
-        self.return_all_cards_in_filtered_deck(deck.id)?;
+        self.return_all_cards_in_filtered_deck(deck)?;
         self.build_filtered_deck(ctx)
     }
diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs
index 76bc206be..63bdebe79 100644
--- a/rslib/src/scheduler/fsrs/params.rs
+++ b/rslib/src/scheduler/fsrs/params.rs
@@ -299,6 +299,33 @@ impl Collection {
                 .is_ok()
         })?)
     }
+
+    pub fn evaluate_params_legacy(
+        &mut self,
+        params: &Params,
+        search: &str,
+        ignore_revlogs_before: TimestampMillis,
+    ) -> Result<ModelEvaluation> {
+        let timing = self.timing_today()?;
+        let mut anki_progress = self.new_progress_handler::<ComputeParamsProgress>();
+        let guard = self.search_cards_into_table(search, SortMode::NoOrder)?;
+        let revlogs: Vec<RevlogEntry> = guard
+            .col
+            .storage
+            .get_revlog_entries_for_searched_cards_in_card_order()?;
+        let (items, review_count) =
+            fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before);
+        anki_progress.state.reviews = review_count as u32;
+        let fsrs = FSRS::new(Some(params))?;
+        Ok(fsrs.evaluate(items, |ip| {
+            anki_progress
+                .update(false, |p| {
+                    p.total_iterations = ip.total as u32;
+                    p.current_iteration = ip.current as u32;
+                })
+                .is_ok()
+        })?)
+    }
 }
 
 #[derive(Default, Clone, Copy, Debug)]
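A hypothetical call site for the new method, assuming a `col: &mut Collection` in scope; the search string, weights, and cutoff are illustrative, and `Params` is assumed to be the usual vector of FSRS weights:

    let params: Vec<f32> = vec![0.4, 0.9, 2.3, 10.9 /* ...remaining weights */];
    let eval = col.evaluate_params_legacy(&params, "deck:current", TimestampMillis(0))?;
    println!("log loss: {}, RMSE (bins): {}", eval.log_loss, eval.rmse_bins);
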
diff --git a/rslib/src/scheduler/fsrs/retention.rs b/rslib/src/scheduler/fsrs/retention.rs
index 29f6b490d..4c21623bb 100644
--- a/rslib/src/scheduler/fsrs/retention.rs
+++ b/rslib/src/scheduler/fsrs/retention.rs
@@ -1,9 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use anki_proto::scheduler::simulate_fsrs_review_request::cmrr_target::Kind;
 use anki_proto::scheduler::SimulateFsrsReviewRequest;
 use fsrs::extract_simulator_config;
-use fsrs::SimulationResult;
 use fsrs::SimulatorConfig;
 use fsrs::FSRS;
 
@@ -16,115 +14,14 @@ pub struct ComputeRetentionProgress {
     pub total: u32,
 }
 
-pub fn average_r_power_forgetting_curve(
-    learn_span: usize,
-    cards: &[fsrs::Card],
-    offset: f32,
-    decay: f32,
-) -> f32 {
-    let factor = 0.9_f32.powf(1.0 / decay) - 1.0;
-    let exp = decay + 1.0;
-    let den_factor = factor * exp;
-
-    // Closure equivalent to the inner integral function
-    let integral_calc = |card: &fsrs::Card| -> f32 {
-        // Performs element-wise: (s / den_factor) * (1.0 + factor * t / s).powf(exp)
-        let t1 = learn_span as f32 - card.last_date;
-        let t2 = t1 + offset;
-        (card.stability / den_factor) * (1.0 + factor * t2 / card.stability).powf(exp)
-            - (card.stability / den_factor) * (1.0 + factor * t1 / card.stability).powf(exp)
-    };
-
-    // Calculate integral difference and divide by time difference element-wise
-    cards.iter().map(integral_calc).sum::<f32>() / offset
-}
-
 impl Collection {
     pub fn compute_optimal_retention(&mut self, req: SimulateFsrsReviewRequest) -> Result<f32> {
-        // Helper macro to wrap the closure for "CMRRTargetFn"s
-        macro_rules! wrap {
-            ($f:expr) => {
-                Some(fsrs::CMRRTargetFn(std::sync::Arc::new($f)))
-            };
-        }
-
-        let target_type = req.target.unwrap().kind;
-
-        let days_to_simulate = req.days_to_simulate as f32;
-
-        let target = match target_type {
-            Some(Kind::Memorized(_)) => None,
-            Some(Kind::FutureMemorized(settings)) => {
-                wrap!(move |SimulationResult {
-                          cards,
-                          cost_per_day,
-                          ..
-                      },
-                      w| {
-                    let total_cost = cost_per_day.iter().sum::<f32>();
-                    total_cost
-                        / cards.iter().fold(0., |p, c| {
-                            c.retention_on(w, days_to_simulate + settings.days as f32) + p
-                        })
-                })
-            }
-            Some(Kind::AverageFutureMemorized(settings)) => {
-                wrap!(move |SimulationResult {
-                          cards,
-                          cost_per_day,
-                          ..
-                      },
-                      w| {
-                    let total_cost = cost_per_day.iter().sum::<f32>();
-                    total_cost
-                        / average_r_power_forgetting_curve(
-                            days_to_simulate as usize,
-                            cards,
-                            settings.days as f32,
-                            -w[20],
-                        )
-                })
-            }
-            Some(Kind::Stability(_)) => {
-                wrap!(move |SimulationResult {
-                          cards,
-                          cost_per_day,
-                          ..
-                      },
-                      w| {
-                    let total_cost = cost_per_day.iter().sum::<f32>();
-                    total_cost
-                        / cards.iter().fold(0., |p, c| {
-                            p + (c.retention_on(w, days_to_simulate) * c.stability)
-                        })
-                })
-            }
-            None => None,
-        };
-
         let mut anki_progress = self.new_progress_handler::<ComputeRetentionProgress>();
         let fsrs = FSRS::new(None)?;
         if req.days_to_simulate == 0 {
             invalid_input!("no days to simulate")
         }
-        let (mut config, cards) = self.simulate_request_to_config(&req)?;
-
-        if let Some(Kind::Memorized(settings)) = target_type {
-            let loss_aversion = settings.loss_aversion;
-
-            config.relearning_step_transitions[0][0] *= loss_aversion;
-            config.relearning_step_transitions[1][0] *= loss_aversion;
-            config.relearning_step_transitions[2][0] *= loss_aversion;
-
-            config.learning_step_transitions[0][0] *= loss_aversion;
-            config.learning_step_transitions[1][0] *= loss_aversion;
-            config.learning_step_transitions[2][0] *= loss_aversion;
-
-            config.state_rating_costs[0][0] *= loss_aversion;
-            config.state_rating_costs[1][0] *= loss_aversion;
-            config.state_rating_costs[2][0] *= loss_aversion;
-        }
-
+        let (config, cards) = self.simulate_request_to_config(&req)?;
         Ok(fsrs
             .optimal_retention(
                 &config,
@@ -137,7 +34,7 @@
                     .is_ok()
                 },
                 Some(cards),
-                target,
+                None,
             )?
             .clamp(0.7, 0.95))
     }
diff --git a/rslib/src/scheduler/reviews.rs b/rslib/src/scheduler/reviews.rs
index 06390e57d..f8d433f42 100644
--- a/rslib/src/scheduler/reviews.rs
+++ b/rslib/src/scheduler/reviews.rs
@@ -36,15 +36,21 @@ impl Card {
         let new_due = (today + days_from_today) as i32;
         let fsrs_enabled = self.memory_state.is_some();
         let new_interval = if fsrs_enabled {
-            let due = self.original_or_current_due();
-            let due_diff = if is_unix_epoch_timestamp(due) {
-                let offset = (due as i64 - next_day_start) / 86_400;
-                let due = (today as i64 + offset) as i32;
-                new_due - due
+            if let Some(last_review_time) = self.last_review_time {
+                let elapsed_days =
+                    TimestampSecs(next_day_start).elapsed_days_since(last_review_time);
+                elapsed_days as u32 + days_from_today
             } else {
-                new_due - due
-            };
-            self.interval.saturating_add_signed(due_diff)
+                let due = self.original_or_current_due();
+                let due_diff = if is_unix_epoch_timestamp(due) {
+                    let offset = (due as i64 - next_day_start) / 86_400;
+                    let due = (today as i64 + offset) as i32;
+                    new_due - due
+                } else {
+                    new_due - due
+                };
+                self.interval.saturating_add_signed(due_diff)
+            }
         } else if force_reset || !matches!(self.ctype, CardType::Review | CardType::Relearn) {
             days_from_today.max(1)
         } else {
diff --git a/rslib/src/scheduler/service/mod.rs b/rslib/src/scheduler/service/mod.rs
index 993fd1dbe..43d694e4f 100644
--- a/rslib/src/scheduler/service/mod.rs
+++ b/rslib/src/scheduler/service/mod.rs
@@ -307,6 +307,21 @@ impl crate::services::SchedulerService for Collection {
         })
     }
 
+    fn evaluate_params_legacy(
+        &mut self,
+        input: scheduler::EvaluateParamsLegacyRequest,
+    ) -> Result<scheduler::EvaluateParamsResponse> {
+        let ret = self.evaluate_params_legacy(
+            &input.params,
+            &input.search,
+            input.ignore_revlogs_before_ms.into(),
+        )?;
+        Ok(scheduler::EvaluateParamsResponse {
+            log_loss: ret.log_loss,
+            rmse_bins: ret.rmse_bins,
+        })
+    }
+
     fn get_optimal_retention_parameters(
         &mut self,
         input: scheduler::GetOptimalRetentionParametersRequest,
diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs
index 409862fce..ae166ef54 100644
--- a/rslib/src/search/parser.rs
+++ b/rslib/src/search/parser.rs
@@ -94,6 +94,7 @@ pub enum SearchNode {
     WholeCollection,
     Regex(String),
     NoCombining(String),
+    StripClozes(String),
     WordBoundary(String),
     CustomData(String),
Preset(String), @@ -358,6 +359,7 @@ fn search_node_for_text_with_argument<'a>( "cid" => SearchNode::CardIds(check_id_list(val, key)?.into()), "re" => SearchNode::Regex(unescape_quotes(val)), "nc" => SearchNode::NoCombining(unescape(val)?), + "sc" => SearchNode::StripClozes(unescape(val)?), "w" => SearchNode::WordBoundary(unescape(val)?), "dupe" => parse_dupe(val)?, "has-cd" => SearchNode::CustomData(unescape(val)?), diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs index 3aa216a4f..8528376cb 100644 --- a/rslib/src/search/sqlwriter.rs +++ b/rslib/src/search/sqlwriter.rs @@ -22,6 +22,7 @@ use crate::notes::field_checksum; use crate::notetype::NotetypeId; use crate::prelude::*; use crate::storage::ids_to_string; +use crate::storage::ProcessTextFlags; use crate::text::glob_matcher; use crate::text::is_glob; use crate::text::normalize_to_nfc; @@ -134,6 +135,7 @@ impl SqlWriter<'_> { self.write_unqualified( text, self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + false, )? } SearchNode::SingleField { field, text, is_re } => { @@ -143,7 +145,14 @@ impl SqlWriter<'_> { self.write_dupe(*notetype_id, &self.norm_note(text))? } SearchNode::Regex(re) => self.write_regex(&self.norm_note(re), false)?, - SearchNode::NoCombining(text) => self.write_unqualified(&self.norm_note(text), true)?, + SearchNode::NoCombining(text) => { + self.write_unqualified(&self.norm_note(text), true, false)? + } + SearchNode::StripClozes(text) => self.write_unqualified( + &self.norm_note(text), + self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + true, + )?, SearchNode::WordBoundary(text) => self.write_word_boundary(&self.norm_note(text))?, // other @@ -190,7 +199,12 @@ impl SqlWriter<'_> { Ok(()) } - fn write_unqualified(&mut self, text: &str, no_combining: bool) -> Result<()> { + fn write_unqualified( + &mut self, + text: &str, + no_combining: bool, + strip_clozes: bool, + ) -> Result<()> { let text = to_sql(text); let text = if no_combining { without_combining(&text) @@ -202,17 +216,37 @@ impl SqlWriter<'_> { self.args.push(text); let arg_idx = self.args.len(); - let sfld_expr = if no_combining { - "coalesce(without_combining(cast(n.sfld as text)), n.sfld)" + let mut process_text_flags = ProcessTextFlags::empty(); + if no_combining { + process_text_flags.insert(ProcessTextFlags::NoCombining); + } + if strip_clozes { + process_text_flags.insert(ProcessTextFlags::StripClozes); + } + + let (sfld_expr, flds_expr) = if !process_text_flags.is_empty() { + let bits = process_text_flags.bits(); + ( + Cow::from(format!( + "coalesce(process_text(cast(n.sfld as text), {bits}), n.sfld)" + )), + Cow::from(format!("coalesce(process_text(n.flds, {bits}), n.flds)")), + ) } else { - "n.sfld" - }; - let flds_expr = if no_combining { - "coalesce(without_combining(n.flds), n.flds)" - } else { - "n.flds" + (Cow::from("n.sfld"), Cow::from("n.flds")) }; + if strip_clozes { + let cloze_notetypes_only_clause = self + .col + .get_all_notetypes()? + .iter() + .filter(|nt| nt.is_cloze()) + .map(|nt| format!("n.mid = {}", nt.id)) + .join(" or "); + write!(self.sql, "({cloze_notetypes_only_clause}) and ").unwrap(); + } + if let Some(field_indicies_by_notetype) = self.included_fields_by_notetype()? 
{ let field_idx_str = format!("' || ?{arg_idx} || '"); let other_idx_str = "%".to_string(); @@ -803,9 +837,12 @@ impl SqlWriter<'_> { fn write_regex(&mut self, word: &str, no_combining: bool) -> Result<()> { let flds_expr = if no_combining { - "coalesce(without_combining(n.flds), n.flds)" + Cow::from(format!( + "coalesce(process_text(n.flds, {}), n.flds)", + ProcessTextFlags::NoCombining.bits() + )) } else { - "n.flds" + Cow::from("n.flds") }; let word = if no_combining { without_combining(word) @@ -995,6 +1032,7 @@ impl SearchNode { SearchNode::Duplicates { .. } => RequiredTable::Notes, SearchNode::Regex(_) => RequiredTable::Notes, SearchNode::NoCombining(_) => RequiredTable::Notes, + SearchNode::StripClozes(_) => RequiredTable::Notes, SearchNode::WordBoundary(_) => RequiredTable::Notes, SearchNode::NotetypeId(_) => RequiredTable::Notes, SearchNode::Notetype(_) => RequiredTable::Notes, @@ -1299,6 +1337,9 @@ c.odue != 0 then c.odue else c.due end) != {days}) or (c.queue in (1,4) and "((c.did in (1) or c.odid in (1)))" ); assert_eq!(&s(ctx, "preset:typo").0, "(false)"); + + // strip clozes + assert_eq!(&s(ctx, "sc:abcdef").0, "((n.mid = 1581236385343) and (coalesce(process_text(cast(n.sfld as text), 2), n.sfld) like ?1 escape '\\' or coalesce(process_text(n.flds, 2), n.flds) like ?1 escape '\\'))"); } #[test] diff --git a/rslib/src/search/writer.rs b/rslib/src/search/writer.rs index 2158bffba..3bbe6fd0a 100644 --- a/rslib/src/search/writer.rs +++ b/rslib/src/search/writer.rs @@ -91,6 +91,7 @@ fn write_search_node(node: &SearchNode) -> String { WholeCollection => "deck:*".to_string(), Regex(s) => maybe_quote(&format!("re:{s}")), NoCombining(s) => maybe_quote(&format!("nc:{s}")), + StripClozes(s) => maybe_quote(&format!("sc:{s}")), WordBoundary(s) => maybe_quote(&format!("w:{s}")), CustomData(k) => maybe_quote(&format!("has-cd:{k}")), Preset(s) => maybe_quote(&format!("preset:{s}")), diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs index b04539717..fdab209c8 100644 --- a/rslib/src/stats/card.rs +++ b/rslib/src/stats/card.rs @@ -30,11 +30,14 @@ impl Collection { let (average_secs, total_secs) = average_and_total_secs_strings(&revlog); let timing = self.timing_today()?; - let seconds_elapsed = self - .storage - .time_of_last_review(card.id)? - .map(|ts| timing.now.elapsed_secs_since(ts)) - .unwrap_or_default() as u32; + let seconds_elapsed = if let Some(last_review_time) = card.last_review_time { + timing.now.elapsed_secs_since(last_review_time) as u32 + } else { + self.storage + .time_of_last_review(card.id)? 
+                .map(|ts| timing.now.elapsed_secs_since(ts))
+                .unwrap_or_default() as u32
+        };
         let fsrs_retrievability = card
             .memory_state
             .zip(Some(seconds_elapsed))
diff --git a/rslib/src/storage/card/data.rs b/rslib/src/storage/card/data.rs
index 6545a6c60..aeee0fbb5 100644
--- a/rslib/src/storage/card/data.rs
+++ b/rslib/src/storage/card/data.rs
@@ -47,6 +47,12 @@ pub(crate) struct CardData {
         deserialize_with = "default_on_invalid"
     )]
     pub(crate) decay: Option<f32>,
+    #[serde(
+        rename = "lrt",
+        skip_serializing_if = "Option::is_none",
+        deserialize_with = "default_on_invalid"
+    )]
+    pub(crate) last_review_time: Option<TimestampSecs>,
 
     /// A string representation of a JSON object storing optional data
     /// associated with the card, so v3 custom scheduling code can persist
@@ -63,6 +69,7 @@ impl CardData {
             fsrs_difficulty: card.memory_state.as_ref().map(|m| m.difficulty),
             fsrs_desired_retention: card.desired_retention,
             decay: card.decay,
+            last_review_time: card.last_review_time,
             custom_data: card.custom_data.clone(),
         }
     }
@@ -169,6 +176,7 @@ mod test {
             fsrs_difficulty: Some(1.234567),
             fsrs_desired_retention: Some(0.987654),
             decay: Some(0.123456),
+            last_review_time: None,
             custom_data: "".to_string(),
         };
         assert_eq!(
diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs
index 0205aef0d..35a229e93 100644
--- a/rslib/src/storage/card/mod.rs
+++ b/rslib/src/storage/card/mod.rs
@@ -97,6 +97,7 @@ fn row_to_card(row: &Row) -> result::Result<Card, rusqlite::Error> {
         memory_state: data.memory_state(),
         desired_retention: data.fsrs_desired_retention,
         decay: data.decay,
+        last_review_time: data.last_review_time,
         custom_data: data.custom_data,
     })
 }
diff --git a/rslib/src/storage/mod.rs b/rslib/src/storage/mod.rs
index 948bc30e4..015f4fdc7 100644
--- a/rslib/src/storage/mod.rs
+++ b/rslib/src/storage/mod.rs
@@ -19,6 +19,7 @@ mod upgrades;
 
 use std::fmt::Write;
 
+pub(crate) use sqlite::ProcessTextFlags;
 pub(crate) use sqlite::SqliteStorage;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
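The serde attributes above mean the card's data column only grows an "lrt" key once a review has been recorded, so older clients see no change. A self-contained mirror of the pattern (the struct is a hypothetical stand-in for CardData):

    use serde::Serialize;

    #[derive(Serialize)]
    struct DataSketch {
        // mirrors CardData's last_review_time field
        #[serde(rename = "lrt", skip_serializing_if = "Option::is_none")]
        last_review_time: Option<i64>,
    }

    fn main() {
        let unset = serde_json::to_string(&DataSketch { last_review_time: None }).unwrap();
        let set =
            serde_json::to_string(&DataSketch { last_review_time: Some(1_699_900_000) }).unwrap();
        assert_eq!(unset, "{}");
        assert_eq!(set, r#"{"lrt":1699900000}"#);
    }
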
diff --git a/rslib/src/storage/sqlite.rs b/rslib/src/storage/sqlite.rs
index e9ae55a3b..e4b6f60f0 100644
--- a/rslib/src/storage/sqlite.rs
+++ b/rslib/src/storage/sqlite.rs
@@ -9,6 +9,7 @@ use std::hash::Hasher;
 use std::path::Path;
 use std::sync::Arc;
 
+use bitflags::bitflags;
 use fnv::FnvHasher;
 use fsrs::FSRS;
 use fsrs::FSRS5_DEFAULT_DECAY;
@@ -24,6 +25,7 @@ use super::upgrades::SCHEMA_MAX_VERSION;
 use super::upgrades::SCHEMA_MIN_VERSION;
 use super::upgrades::SCHEMA_STARTING_VERSION;
 use super::SchemaVersion;
+use crate::cloze::strip_clozes;
 use crate::config::schema11::schema11_config_as_string;
 use crate::error::DbErrorKind;
 use crate::prelude::*;
@@ -31,6 +33,7 @@ use crate::scheduler::timing::local_minutes_west_for_stamp;
 use crate::scheduler::timing::v1_creation_date;
 use crate::storage::card::data::CardData;
 use crate::text::without_combining;
+use crate::text::CowMapping;
 
 fn unicase_compare(s1: &str, s2: &str) -> Ordering {
     UniCase::new(s1).cmp(&UniCase::new(s2))
@@ -74,7 +77,7 @@ fn open_or_create_collection_db(path: &Path) -> Result<Connection> {
     add_regexp_function(&db)?;
     add_regexp_fields_function(&db)?;
     add_regexp_tags_function(&db)?;
-    add_without_combining_function(&db)?;
+    add_process_text_function(&db)?;
     add_fnvhash_function(&db)?;
     add_extract_original_position_function(&db)?;
     add_extract_custom_data_function(&db)?;
@@ -111,17 +114,28 @@ fn add_field_index_function(db: &Connection) -> rusqlite::Result<()> {
     )
 }
 
-fn add_without_combining_function(db: &Connection) -> rusqlite::Result<()> {
+bitflags! {
+    pub(crate) struct ProcessTextFlags: u8 {
+        const NoCombining = 1;
+        const StripClozes = 1 << 1;
+    }
+}
+
+fn add_process_text_function(db: &Connection) -> rusqlite::Result<()> {
     db.create_scalar_function(
-        "without_combining",
-        1,
+        "process_text",
+        2,
         FunctionFlags::SQLITE_DETERMINISTIC,
         |ctx| {
-            let text = ctx.get_raw(0).as_str()?;
-            Ok(match without_combining(text) {
-                Cow::Borrowed(_) => None,
-                Cow::Owned(o) => Some(o),
-            })
+            let mut text = Cow::from(ctx.get_raw(0).as_str()?);
+            let opt = ProcessTextFlags::from_bits_truncate(ctx.get_raw(1).as_i64()? as u8);
+            if opt.contains(ProcessTextFlags::StripClozes) {
+                text = text.map_cow(strip_clozes);
+            }
+            if opt.contains(ProcessTextFlags::NoCombining) {
+                text = text.map_cow(without_combining);
+            }
+            Ok(text.get_owned())
         },
     )
 }
@@ -314,7 +328,13 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
         let Ok(due) = ctx.get_raw(1).as_i64() else {
             return Ok(None);
         };
-        let days_elapsed = if due > 365_000 {
+        let days_elapsed = if let Some(last_review_time) = card_data.last_review_time {
+            // Use last_review_time to calculate days_elapsed
+            let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
+                return Ok(None);
+            };
+            (next_day_at as u32).saturating_sub(last_review_time.0 as u32) / 86_400
+        } else if due > 365_000 {
             // (re)learning card in seconds
             let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
                 return Ok(None);
             };
@@ -382,6 +402,14 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result<()> {
                     desired_retrievability = desired_retrievability.max(0.0001);
 
                     let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
 
+                    let days_elapsed = if let Some(last_review_time) =
+                        card_data.last_review_time
+                    {
+                        TimestampSecs(next_day_at).elapsed_days_since(last_review_time) as u32
+                    } else {
+                        days_elapsed
+                    };
+
                     let current_retrievability = FSRS::new(None)
                         .unwrap()
                         .current_retrievability(state.into(), days_elapsed, decay)
diff --git a/rslib/src/sync/collection/chunks.rs b/rslib/src/sync/collection/chunks.rs
index 9d74ddb6c..7873c89c1 100644
--- a/rslib/src/sync/collection/chunks.rs
+++ b/rslib/src/sync/collection/chunks.rs
@@ -333,6 +333,7 @@ impl From<CardEntry> for Card {
             memory_state: data.memory_state(),
             desired_retention: data.fsrs_desired_retention,
             decay: data.decay,
+            last_review_time: data.last_review_time,
             custom_data: data.custom_data,
         }
     }
diff --git a/tools/run-qt6.9 b/tools/run-qt6.8
similarity index 70%
rename from tools/run-qt6.9
rename to tools/run-qt6.8
index 6576b6c81..1628a1e33 100755
--- a/tools/run-qt6.9
+++ b/tools/run-qt6.8
@@ -4,6 +4,6 @@ set -e
 
 ./ninja extract:uv
-export PYENV=./out/pyenv69
-UV_PROJECT_ENVIRONMENT=$PYENV ./out/extracted/uv/uv sync --all-packages --extra qt69
+export PYENV=./out/pyenv68
+UV_PROJECT_ENVIRONMENT=$PYENV ./out/extracted/uv/uv sync --all-packages --extra qt68
 ./run $*
diff --git a/ts/lib/components/HelpModal.svelte b/ts/lib/components/HelpModal.svelte
index 5fff619df..cf6292537 100644
--- a/ts/lib/components/HelpModal.svelte
+++ b/ts/lib/components/HelpModal.svelte
@@ -181,6 +181,11 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         margin-bottom: 1.5rem;
     }
 
+    .modal {
+        z-index: 1066;
+        background-color: rgba($color: black, $alpha: 0.5);
+    }
+
     .modal-title {
         margin-inline-end: 0.75rem;
     }
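process_text() replaces the single-purpose without_combining() SQL function: the second argument is a bitmask built from ProcessTextFlags, so one scalar function covers both transforms and their combination. A quick check of the flag arithmetic (the sc: test in sqlwriter.rs shows the generated SQL passing 2):

    let nc = ProcessTextFlags::NoCombining;
    let sc = ProcessTextFlags::StripClozes;
    assert_eq!(nc.bits(), 1);
    assert_eq!(sc.bits(), 2);
    assert_eq!((nc | sc).bits(), 3); // strip clozes, then remove combining marks
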
"./Row.svelte"; import type { HelpItem } from "./types"; + import { mdiEarth } from "./icons"; + import Icon from "./Icon.svelte"; export let item: HelpItem; @@ -21,6 +23,11 @@ {/if} {#if item.help} + {#if item.global} +

+ +
+ {/if} {@html renderMarkdown(item.help)} {:else} {@html renderMarkdown( @@ -54,4 +61,12 @@ color: var(--fg-subtle); font-size: small; } + + .icon { + display: inline-block; + width: 1em; + fill: currentColor; + margin-right: 0.25em; + margin-bottom: 1.25em; + } diff --git a/ts/lib/components/RevertButton.svelte b/ts/lib/components/RevertButton.svelte index a1d6af06d..08376e7e6 100644 --- a/ts/lib/components/RevertButton.svelte +++ b/ts/lib/components/RevertButton.svelte @@ -76,7 +76,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } .hide :global(.badge) { - opacity: 0; + display: none; cursor: initial; } diff --git a/ts/lib/components/types.ts b/ts/lib/components/types.ts index 9a2105d9e..2f94a2778 100644 --- a/ts/lib/components/types.ts +++ b/ts/lib/components/types.ts @@ -9,6 +9,7 @@ export type HelpItem = { help?: string; url?: string; sched?: HelpItemScheduler; + global?: boolean; }; export enum HelpItemScheduler { diff --git a/ts/lib/editable/Mathjax.svelte b/ts/lib/editable/Mathjax.svelte index 7be3c986d..a270e6c06 100644 --- a/ts/lib/editable/Mathjax.svelte +++ b/ts/lib/editable/Mathjax.svelte @@ -38,7 +38,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import { pageTheme } from "$lib/sveltelib/theme"; import { convertMathjax, unescapeSomeEntities } from "./mathjax"; - import { ChangeTimer } from "./change-timer"; + import { CooldownTimer } from "./cooldown-timer"; export let mathjax: string; export let block: boolean; @@ -46,25 +46,23 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html let converted: string, title: string; - const debouncedMathjax = writable(mathjax); - const debouncer = new ChangeTimer(); - $: debouncer.schedule(() => debouncedMathjax.set(mathjax), 500); + const debouncer = new CooldownTimer(500); - $: { + $: debouncer.schedule(() => { const cache = getCache($pageTheme.isDark, fontSize); - const entry = cache.get($debouncedMathjax); + const entry = cache.get(mathjax); if (entry) { [converted, title] = entry; } else { const entry = convertMathjax( - unescapeSomeEntities($debouncedMathjax), + unescapeSomeEntities(mathjax), $pageTheme.isDark, fontSize, ); [converted, title] = entry; - cache.set($debouncedMathjax, entry); + cache.set(mathjax, entry); } - } + }); $: empty = title === "MathJax"; $: encoded = encodeURIComponent(converted); diff --git a/ts/lib/editable/cooldown-timer.ts b/ts/lib/editable/cooldown-timer.ts new file mode 100644 index 000000000..892e2b05f --- /dev/null +++ b/ts/lib/editable/cooldown-timer.ts @@ -0,0 +1,31 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +export class CooldownTimer { + private executing = false; + private queuedAction: (() => void) | null = null; + private delay: number; + + constructor(delayMs: number) { + this.delay = delayMs; + } + + schedule(action: () => void): void { + if (this.executing) { + this.queuedAction = action; + } else { + this.executing = true; + action(); + setTimeout(this.#pop.bind(this), this.delay); + } + } + + #pop(): void { + this.executing = false; + if (this.queuedAction) { + const action = this.queuedAction; + this.queuedAction = null; + this.schedule(action); + } + } +} diff --git a/ts/lib/sass/base.scss b/ts/lib/sass/base.scss index d28659ed8..571890102 100644 --- a/ts/lib/sass/base.scss +++ b/ts/lib/sass/base.scss @@ -44,6 +44,7 @@ html { } body { + font-family: inherit; overflow-x: hidden; &:not(.isMac), 
&:not(.isMac) * { diff --git a/ts/licenses.json b/ts/licenses.json index 2e88336b3..412d1dae3 100644 --- a/ts/licenses.json +++ b/ts/licenses.json @@ -95,8 +95,8 @@ "repository": "https://github.com/TooTallNate/node-agent-base", "publisher": "Nathan Rajlich", "email": "nathan@tootallnate.net", - "path": "node_modules/http-proxy-agent/node_modules/agent-base", - "licenseFile": "node_modules/http-proxy-agent/node_modules/agent-base/README.md" + "path": "node_modules/https-proxy-agent/node_modules/agent-base", + "licenseFile": "node_modules/https-proxy-agent/node_modules/agent-base/README.md" }, "asynckit@0.4.0": { "licenses": "MIT", diff --git a/ts/routes/deck-options/AdvancedOptions.svelte b/ts/routes/deck-options/AdvancedOptions.svelte index 31c3f0d4c..fb892b7ec 100644 --- a/ts/routes/deck-options/AdvancedOptions.svelte +++ b/ts/routes/deck-options/AdvancedOptions.svelte @@ -82,6 +82,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: tr.deckConfigCustomScheduling(), help: tr.deckConfigCustomSchedulingTooltip(), url: "https://faqs.ankiweb.net/the-2021-scheduler.html#add-ons-and-custom-scheduling", + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/DailyLimits.svelte b/ts/routes/deck-options/DailyLimits.svelte index 9b156ca00..ea403c1f4 100644 --- a/ts/routes/deck-options/DailyLimits.svelte +++ b/ts/routes/deck-options/DailyLimits.svelte @@ -133,14 +133,15 @@ }, newCardsIgnoreReviewLimit: { title: tr.deckConfigNewCardsIgnoreReviewLimit(), - help: newCardsIgnoreReviewLimitHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, applyAllParentLimits: { title: tr.deckConfigApplyAllParentLimits(), help: applyAllParentLimitsHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/EasyDaysInput.svelte b/ts/routes/deck-options/EasyDaysInput.svelte index a7c13e2e2..fb5d9cd2d 100644 --- a/ts/routes/deck-options/EasyDaysInput.svelte +++ b/ts/routes/deck-options/EasyDaysInput.svelte @@ -20,53 +20,64 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -
- - - - - - - - - - - {#each easyDays as day, index} - - - - - {/each} - -
- {tr.deckConfigEasyDaysMinimum()} - - {tr.deckConfigEasyDaysReduced()} - - {tr.deckConfigEasyDaysNormal()} -
{day} - -
+
+
+ + {tr.deckConfigEasyDaysMinimum()} + {tr.deckConfigEasyDaysReduced()} + {tr.deckConfigEasyDaysNormal()} + + {#each easyDays as day, index} + {day} +
+ +
+ {/each} +
- diff --git a/ts/routes/deck-options/FsrsOptions.svelte b/ts/routes/deck-options/FsrsOptions.svelte index fadaeba67..706407889 100644 --- a/ts/routes/deck-options/FsrsOptions.svelte +++ b/ts/routes/deck-options/FsrsOptions.svelte @@ -7,14 +7,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html ComputeRetentionProgress, type ComputeParamsProgress, } from "@generated/anki/collection_pb"; - import { - SimulateFsrsReviewRequest, - SimulateFsrsReviewRequest_CMRRTarget, - SimulateFsrsReviewRequest_CMRRTarget_Memorized, - } from "@generated/anki/scheduler_pb"; + import { SimulateFsrsReviewRequest } from "@generated/anki/scheduler_pb"; import { computeFsrsParams, - evaluateParams, + evaluateParamsLegacy, getRetentionWorkload, setWantsAbort, } from "@generated/backend"; @@ -99,14 +95,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html newCardsIgnoreReviewLimit: $newCardsIgnoreReviewLimit, easyDaysPercentages: $config.easyDaysPercentages, reviewOrder: $config.reviewOrder, - target: new SimulateFsrsReviewRequest_CMRRTarget({ - kind: { - case: "memorized", - value: new SimulateFsrsReviewRequest_CMRRTarget_Memorized({ - lossAversion: 1.6, - }), - }, - }), }); const DESIRED_RETENTION_LOW_THRESHOLD = 0.8; @@ -256,10 +244,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html const search = $config.paramSearch ? $config.paramSearch : defaultparamSearch; - const resp = await evaluateParams({ + const resp = await evaluateParamsLegacy({ search, ignoreRevlogsBeforeMs: getIgnoreRevlogsBeforeMs(), - numOfRelearningSteps: $config.relearnSteps.length, + params: fsrsParams($config), }); if (computeParamsProgress) { computeParamsProgress.current = computeParamsProgress.total; @@ -373,8 +361,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html {tr.deckConfigOptimizeButton()} {/if} - {#if false} - + {#if state.legacyEvaluate} + + {#if optimalRetention} + {estimatedRetention(optimalRetention)} + {#if optimalRetention - $config.desiredRetention >= 0.01} + + {/if} + {/if} + {#if computingRetention} - {tr.actionsCancel()} - {:else} - {tr.deckConfigComputeButton()} +
{computeRetentionProgressString}
{/if} - - - {#if optimalRetention} - {estimatedRetention(optimalRetention)} - {#if optimalRetention - $config.desiredRetention >= 0.01} - - {/if} - {/if} - - {#if computingRetention} -
{computeRetentionProgressString}
- {/if} - - - - - {"Target: "} - - - - - {#if simulateFsrsRequest.target?.kind.case === "memorized"} - - - {"Fail Cost Multiplier: "} - - - {/if} - - {#if simulateFsrsRequest.target?.kind.case === "futureMemorized" || simulateFsrsRequest.target?.kind.case === "averageFutureMemorized"} - - - {"Days after simulation end: "} - - - {/if} - - + +