diff --git a/.gitignore b/.gitignore index 768716ca4..ccac21aa2 100644 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,4 @@ yarn-error.log ts/.svelte-kit .yarn .claude/settings.local.json -CLAUDE.local.md +.claude/user.md diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index a26991a95..000000000 --- a/.isort.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[settings] -py_version=39 -known_first_party=anki,aqt,tests -profile=black diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 2413cc6c4..000000000 --- a/.pylintrc +++ /dev/null @@ -1,48 +0,0 @@ -[MASTER] -ignore-patterns=.*_pb2.* -persistent = no -extension-pkg-whitelist=orjson,PyQt6 -init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])" - -[REPORTS] -output-format=colorized - -[MESSAGES CONTROL] -disable= - R, - line-too-long, - too-many-lines, - missing-function-docstring, - missing-module-docstring, - missing-class-docstring, - import-outside-toplevel, - wrong-import-position, - wrong-import-order, - fixme, - unused-wildcard-import, - attribute-defined-outside-init, - redefined-builtin, - wildcard-import, - broad-except, - bare-except, - unused-argument, - unused-variable, - redefined-outer-name, - global-statement, - protected-access, - arguments-differ, - arguments-renamed, - consider-using-f-string, - invalid-name, - broad-exception-raised - -[BASIC] -good-names = - id, - tr, - db, - ok, - ip, - -[IMPORTS] -ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2 diff --git a/.ruff.toml b/.ruff.toml index fb6ffa2d8..4fa1ffea6 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -1,2 +1,91 @@ -target-version = "py39" -extend-exclude = [] +lint.select = [ + "E", # pycodestyle errors + "F", # Pyflakes errors + "PL", # Pylint rules + "I", # Isort rules + "ARG", + # "UP", # pyupgrade + # "B", # flake8-bugbear + # "SIM", # flake8-simplify +] + +extend-exclude = ["*_pb2.py", "*_pb2.pyi"] + +lint.ignore = [ + # 
Docstring rules (missing-*-docstring in pylint) + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D103", # Missing docstring in public function + + # Import rules (wrong-import-* in pylint) + "E402", # Module level import not at top of file + "E501", # Line too long + + # pycodestyle rules + "E741", # ambiguous-variable-name + + # Comment rules (fixme in pylint) + "FIX002", # Line contains TODO + + # Pyflakes rules + "F402", # import-shadowed-by-loop-var + "F403", # undefined-local-with-import-star + "F405", # undefined-local-with-import-star-usage + + # Naming rules (invalid-name in pylint) + "N801", # Class name should use CapWords convention + "N802", # Function name should be lowercase + "N803", # Argument name should be lowercase + "N806", # Variable in function should be lowercase + "N811", # Constant imported as non-constant + "N812", # Lowercase imported as non-lowercase + "N813", # Camelcase imported as lowercase + "N814", # Camelcase imported as constant + "N815", # Variable in class scope should not be mixedCase + "N816", # Variable in global scope should not be mixedCase + "N817", # CamelCase imported as acronym + "N818", # Error suffix in exception names + + # Pylint rules + "PLW0603", # global-statement + "PLW2901", # redefined-loop-name + "PLC0415", # import-outside-top-level + "PLR2004", # magic-value-comparison + + # Exception handling (broad-except, bare-except in pylint) + "BLE001", # Do not catch blind exception + + # Argument rules (unused-argument in pylint) + "ARG001", # Unused function argument + "ARG002", # Unused method argument + "ARG005", # Unused lambda argument + + # Access rules (protected-access in pylint) + "SLF001", # Private member accessed + + # String formatting (consider-using-f-string in pylint) + "UP032", # Use f-string instead of format call + + # Exception rules (broad-exception-raised in pylint) + "TRY301", # Abstract raise to an inner function + + # Builtin shadowing 
(redefined-builtin in pylint) + "A001", # Variable shadows a Python builtin + "A002", # Argument shadows a Python builtin + "A003", # Class attribute shadows a Python builtin +] + +[lint.per-file-ignores] +"**/anki/*_pb2.py" = ["ALL"] + +[lint.pep8-naming] +ignore-names = ["id", "tr", "db", "ok", "ip"] + +[lint.pylint] +max-args = 12 +max-returns = 10 +max-branches = 35 +max-statements = 125 + +[lint.isort] +known-first-party = ["anki", "aqt", "tests"] diff --git a/.version b/.version index a38238a29..6ee14a7b9 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -25.06b5 +25.07.1 diff --git a/.vscode.dist/extensions.json b/.vscode.dist/extensions.json index bb449cc57..eb13662d6 100644 --- a/.vscode.dist/extensions.json +++ b/.vscode.dist/extensions.json @@ -2,7 +2,7 @@ "recommendations": [ "dprint.dprint", "ms-python.python", - "ms-python.black-formatter", + "charliermarsh.ruff", "rust-lang.rust-analyzer", "svelte.svelte-vscode", "zxh404.vscode-proto3", diff --git a/.vscode.dist/settings.json b/.vscode.dist/settings.json index ab91e06ab..0da294c38 100644 --- a/.vscode.dist/settings.json +++ b/.vscode.dist/settings.json @@ -18,7 +18,7 @@ "out/qt", "qt" ], - "python.formatting.provider": "black", + "python.formatting.provider": "charliermarsh.ruff", "python.linting.mypyEnabled": false, "python.analysis.diagnosticSeverityOverrides": { "reportMissingModuleSource": "none" diff --git a/CLAUDE.md b/CLAUDE.md index 6ec6db642..fa58b805b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -21,7 +21,7 @@ Please do this as a final step before marking a task as completed. During development, you can build/check subsections of our code: - Rust: 'cargo check' -- Python: './tools/dmypy' +- Python: './tools/dmypy', and if wheel-related, './ninja wheels' - TypeScript/Svelte: './ninja check:svelte' Be mindful that some changes (such as modifications to .proto files) may @@ -80,3 +80,7 @@ when possible. in rslib, use error/mod.rs's AnkiError/Result and snafu. 
In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping in build scripts/tests is fine. + +## Individual preferences + +See @.claude/user.md diff --git a/CONTRIBUTORS b/CONTRIBUTORS index d334540fb..327e37f27 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -63,6 +63,7 @@ Jakub Kaczmarzyk Akshara Balachandra lukkea David Allison +David Allison <62114487+david-allison@users.noreply.github.com> Tsung-Han Yu Piotr Kubowicz RumovZ @@ -232,6 +233,7 @@ Spiritual Father Emmanuel Ferdman Sunong2008 Marvin Kopf +Kevin Nakamura ******************** The text of the 3 clause BSD license follows: diff --git a/Cargo.lock b/Cargo.lock index 03f9e63c8..26006790b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -94,6 +94,7 @@ dependencies = [ "axum", "axum-client-ip", "axum-extra", + "bitflags 2.9.1", "blake3", "bytes", "chrono", @@ -3543,11 +3544,13 @@ dependencies = [ "anki_io", "anki_process", "anyhow", + "camino", "dirs 6.0.0", "embed-resource", "libc", "libc-stdhandle", - "winapi", + "widestring", + "windows 0.61.3", ] [[package]] @@ -7374,6 +7377,12 @@ dependencies = [ "winsafe", ] +[[package]] +name = "widestring" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d" + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index d2ce2ce2a..db5753893 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,6 +60,7 @@ async-trait = "0.1.88" axum = { version = "0.8.4", features = ["multipart", "macros"] } axum-client-ip = "1.1.3" axum-extra = { version = "0.10.1", features = ["typed-header"] } +bitflags = "2.9.1" blake3 = "1.8.2" bytes = "1.10.1" camino = "1.1.10" @@ -138,8 +139,9 @@ unic-ucd-category = "0.9.0" unicode-normalization = "0.1.24" walkdir = "2.5.0" which = "8.0.0" -winapi = { version = "0.3", features = ["wincon", "errhandlingapi", "consoleapi"] } -windows = { version = "0.61.3", features = 
["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] } +widestring = "1.1.0" +winapi = { version = "0.3", features = ["wincon", "winreg"] } +windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] } wiremock = "0.6.3" xz2 = "0.1.7" zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] } diff --git a/LICENSE b/LICENSE index 033dc2a0a..456a7cfd6 100644 --- a/LICENSE +++ b/LICENSE @@ -6,8 +6,6 @@ The following included source code items use a license other than AGPL3: In the pylib folder: - * The SuperMemo importer: GPL3 and 0BSD. - * The Pauker importer: BSD-3. * statsbg.py: CC BY 4.0. In the qt folder: diff --git a/README.md b/README.md index 3bdcc2db3..04d5603a7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Anki +# Anki® [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci) diff --git a/build/configure/src/aqt.rs b/build/configure/src/aqt.rs index 0a9b32270..83be77e91 100644 --- a/build/configure/src/aqt.rs +++ b/build/configure/src/aqt.rs @@ -342,7 +342,12 @@ fn build_wheel(build: &mut Build) -> Result<()> { name: "aqt", version: anki_version(), platform: None, - deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "qt/pyproject.toml"], + deps: inputs![ + ":qt:aqt", + glob!("qt/aqt/**"), + "qt/pyproject.toml", + "qt/hatch_build.py" + ], }, ) } diff --git a/build/configure/src/pylib.rs b/build/configure/src/pylib.rs index bcef1ecc4..21820ae8b 100644 --- a/build/configure/src/pylib.rs +++ b/build/configure/src/pylib.rs @@ -68,7 +68,8 @@ pub fn build_pylib(build: &mut Build) -> Result<()> { deps: inputs![ ":pylib:anki", glob!("pylib/anki/**"), - "pylib/pyproject.toml" + "pylib/pyproject.toml", + "pylib/hatch_build.py" ], }, )?; diff --git 
a/build/configure/src/python.rs b/build/configure/src/python.rs index 474a55f31..9d5e9057e 100644 --- a/build/configure/src/python.rs +++ b/build/configure/src/python.rs @@ -7,17 +7,14 @@ use anyhow::Result; use ninja_gen::action::BuildAction; use ninja_gen::archives::Platform; use ninja_gen::build::FilesHandle; -use ninja_gen::command::RunCommand; use ninja_gen::copy::CopyFiles; use ninja_gen::glob; -use ninja_gen::hashmap; use ninja_gen::input::BuildInput; use ninja_gen::inputs; use ninja_gen::python::python_format; use ninja_gen::python::PythonEnvironment; -use ninja_gen::python::PythonLint; use ninja_gen::python::PythonTypecheck; -use ninja_gen::rsync::RsyncFiles; +use ninja_gen::python::RuffCheck; use ninja_gen::Build; /// Normalize version string by removing leading zeros from numeric parts @@ -51,7 +48,7 @@ fn normalize_version(version: &str) -> String { part.to_string() } else { let normalized_prefix = numeric_prefix.parse::().unwrap_or(0).to_string(); - format!("{}{}", normalized_prefix, rest) + format!("{normalized_prefix}{rest}") } } }) @@ -60,14 +57,7 @@ fn normalize_version(version: &str) -> String { } pub fn setup_venv(build: &mut Build) -> Result<()> { - let extra_binary_exports = &[ - "mypy", - "black", - "isort", - "pylint", - "pytest", - "protoc-gen-mypy", - ]; + let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"]; build.add_action( "pyenv", PythonEnvironment { @@ -200,60 +190,26 @@ pub fn check_python(build: &mut Build) -> Result<()> { }, )?; - add_pylint(build)?; - - Ok(()) -} - -fn add_pylint(build: &mut Build) -> Result<()> { - // pylint does not support PEP420 implicit namespaces split across import paths, - // so we need to merge our pylib sources and generated files before invoking it, - // and add a top-level __init__.py + let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"]; + let ruff_deps = inputs![ + glob!["{pylib,ftl,qt,python,tools}/**/*.py"], + ":pylib:anki", + ":qt:aqt" + ]; 
build.add_action( - "check:pylint:copy_pylib", - RsyncFiles { - inputs: inputs![":pylib:anki"], - target_folder: "pylint/anki", - strip_prefix: "$builddir/pylib/anki", - // avoid copying our large rsbridge binary - extra_args: "--links", + "check:ruff", + RuffCheck { + folders: ruff_folders, + deps: ruff_deps.clone(), + check_only: true, }, )?; build.add_action( - "check:pylint:copy_pylib", - RsyncFiles { - inputs: inputs![glob!["pylib/anki/**"]], - target_folder: "pylint/anki", - strip_prefix: "pylib/anki", - extra_args: "", - }, - )?; - build.add_action( - "check:pylint:copy_pylib", - RunCommand { - command: ":pyenv:bin", - args: "$script $out", - inputs: hashmap! { "script" => inputs!["python/mkempty.py"] }, - outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] }, - }, - )?; - build.add_action( - "check:pylint", - PythonLint { - folders: &[ - "$builddir/pylint/anki", - "qt/aqt", - "ftl", - "pylib/tools", - "tools", - "python", - ], - pylint_ini: inputs![".pylintrc"], - deps: inputs![ - ":check:pylint:copy_pylib", - ":qt:aqt", - glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py") - ], + "fix:ruff", + RuffCheck { + folders: ruff_folders, + deps: ruff_deps, + check_only: false, }, )?; diff --git a/build/configure/src/rust.rs b/build/configure/src/rust.rs index 1ff0fc97c..758752fa6 100644 --- a/build/configure/src/rust.rs +++ b/build/configure/src/rust.rs @@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> { pub fn check_rust(build: &mut Build) -> Result<()> { let inputs = inputs![ - glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"), + glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"), "Cargo.lock", "Cargo.toml", "rust-toolchain.toml", diff --git a/build/ninja_gen/Cargo.toml b/build/ninja_gen/Cargo.toml index cacab6a7b..5e5a4f736 100644 --- a/build/ninja_gen/Cargo.toml +++ b/build/ninja_gen/Cargo.toml @@ -35,3 +35,7 @@ path = "src/bin/update_uv.rs" [[bin]] name = "update_protoc" path = 
"src/bin/update_protoc.rs" + +[[bin]] +name = "update_node" +path = "src/bin/update_node.rs" diff --git a/build/ninja_gen/src/bin/update_node.rs b/build/ninja_gen/src/bin/update_node.rs new file mode 100644 index 000000000..32dbf6d4a --- /dev/null +++ b/build/ninja_gen/src/bin/update_node.rs @@ -0,0 +1,268 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::error::Error; +use std::fs; +use std::path::Path; + +use regex::Regex; +use reqwest::blocking::Client; +use serde_json::Value; + +#[derive(Debug)] +struct NodeRelease { + version: String, + files: Vec, +} + +#[derive(Debug)] +struct NodeFile { + filename: String, + url: String, +} + +fn main() -> Result<(), Box> { + let release_info = fetch_node_release_info()?; + let new_text = generate_node_archive_function(&release_info)?; + update_node_text(&new_text)?; + println!("Node.js archive function updated successfully!"); + Ok(()) +} + +fn fetch_node_release_info() -> Result> { + let client = Client::new(); + + // Get the Node.js release info + let response = client + .get("https://nodejs.org/dist/index.json") + .header("User-Agent", "anki-build-updater") + .send()?; + + let releases: Vec = response.json()?; + + // Find the latest LTS release + let latest = releases + .iter() + .find(|release| { + // LTS releases have a non-false "lts" field + release["lts"].as_str().is_some() && release["lts"] != false + }) + .ok_or("No LTS releases found")?; + + let version = latest["version"] + .as_str() + .ok_or("Version not found")? + .to_string(); + + let files = latest["files"] + .as_array() + .ok_or("Files array not found")? 
+ .iter() + .map(|f| f.as_str().unwrap_or("")) + .collect::<Vec<&str>>(); + + let lts_name = latest["lts"].as_str().unwrap_or("unknown"); + println!("Found Node.js LTS version: {version} ({lts_name})"); + + // Map platforms to their expected file keys and full filenames + let platform_mapping = vec![ + ( + "linux-x64", + "linux-x64", + format!("node-{version}-linux-x64.tar.xz"), + ), + ( + "linux-arm64", + "linux-arm64", + format!("node-{version}-linux-arm64.tar.xz"), + ), + ( + "darwin-x64", + "osx-x64-tar", + format!("node-{version}-darwin-x64.tar.xz"), + ), + ( + "darwin-arm64", + "osx-arm64-tar", + format!("node-{version}-darwin-arm64.tar.xz"), + ), + ( + "win-x64", + "win-x64-zip", + format!("node-{version}-win-x64.zip"), + ), + ( + "win-arm64", + "win-arm64-zip", + format!("node-{version}-win-arm64.zip"), + ), + ]; + + let mut node_files = Vec::new(); + + for (platform, file_key, filename) in platform_mapping { + // Check if this file exists in the release + if files.contains(&file_key) { + let url = format!("https://nodejs.org/dist/{version}/{filename}"); + node_files.push(NodeFile { + filename: filename.clone(), + url, + }); + println!("Found file for {platform}: {filename} (key: {file_key})"); + } else { + return Err( + format!("File not found for {platform} (key: {file_key}): {filename}").into(), + ); + } + } + + Ok(NodeRelease { + version, + files: node_files, + }) +} + +fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> { + let client = Client::new(); + + // Fetch the SHASUMS256.txt file once + println!("Fetching SHA256 checksums..."); + let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version); + let shasums_response = client + .get(&shasums_url) + .header("User-Agent", "anki-build-updater") + .send()?; + let shasums_text = shasums_response.text()?; + + // Create a mapping from filename patterns to platform names - using the exact + // patterns we stored in files + let platform_mapping = vec![ + ("linux-x64.tar.xz",
"LinuxX64"), + ("linux-arm64.tar.xz", "LinuxArm"), + ("darwin-x64.tar.xz", "MacX64"), + ("darwin-arm64.tar.xz", "MacArm"), + ("win-x64.zip", "WindowsX64"), + ("win-arm64.zip", "WindowsArm"), + ]; + + let mut platform_blocks = Vec::new(); + + for (file_pattern, platform_name) in platform_mapping { + // Find the file that ends with this pattern + if let Some(file) = release + .files + .iter() + .find(|f| f.filename.ends_with(file_pattern)) + { + // Find the SHA256 for this file + let sha256 = shasums_text + .lines() + .find(|line| line.contains(&file.filename)) + .and_then(|line| line.split_whitespace().next()) + .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?; + + println!( + "Found SHA256 for {}: {} => {}", + platform_name, file.filename, sha256 + ); + + let block = format!( + " Platform::{} => OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }},", + platform_name, file.url, sha256 + ); + platform_blocks.push(block); + } else { + return Err(format!( + "File not found for platform {platform_name}: no file ending with {file_pattern}" + ) + .into()); + } + } + + let function = format!( + "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}", + platform_blocks.join("\n") + ); + + Ok(function) +} + +fn update_node_text(new_function: &str) -> Result<(), Box> { + let node_rs_content = read_node_rs()?; + + // Regex to match the entire node_archive function with proper multiline + // matching + let re = Regex::new( + r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}", + )?; + + let updated_content = re.replace(&node_rs_content, new_function); + + write_node_rs(&updated_content)?; + Ok(()) +} + +fn read_node_rs() -> Result> { + // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs + let manifest_dir = + std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?; + let path = Path::new(&manifest_dir).join("src").join("node.rs"); + 
Ok(fs::read_to_string(path)?) +} + +fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> { + // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs + let manifest_dir = + std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?; + let path = Path::new(&manifest_dir).join("src").join("node.rs"); + fs::write(path, content)?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_regex_replacement() { + let sample_content = r#"Some other code +pub fn node_archive(platform: Platform) -> OnlineArchive { + match platform { + Platform::LinuxX64 => OnlineArchive { + url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz", + sha256: "old_hash", + }, + Platform::MacX64 => OnlineArchive { + url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz", + sha256: "old_hash", + }, + } +} + +More code here"#; + + let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive { + match platform { + Platform::LinuxX64 => OnlineArchive { + url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz", + sha256: "new_hash", + }, + Platform::MacX64 => OnlineArchive { + url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz", + sha256: "new_hash", + }, + } +}"#; + + let re = Regex::new( + r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}" + ).unwrap(); + + let result = re.replace(sample_content, new_function); + assert!(result.contains("v21.0.0")); + assert!(result.contains("new_hash")); + assert!(!result.contains("old_hash")); + assert!(result.contains("Some other code")); + assert!(result.contains("More code here")); + } +} diff --git a/build/ninja_gen/src/bin/update_protoc.rs b/build/ninja_gen/src/bin/update_protoc.rs index 224dbaa50..3a8f06b8b 100644 --- a/build/ninja_gen/src/bin/update_protoc.rs +++ b/build/ninja_gen/src/bin/update_protoc.rs @@ -72,12 +72,11 @@ fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> { "MacArm" =>
continue, // Skip MacArm since it's handled with MacX64 "WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm", "WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64 - _ => &format!("Platform::{}", platform), + _ => &format!("Platform::{platform}"), }; match_blocks.push(format!( - " {} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}", - match_pattern, download_url, sha256 + " {match_pattern} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}" )); } diff --git a/build/ninja_gen/src/bin/update_uv.rs b/build/ninja_gen/src/bin/update_uv.rs index 39cf87668..5a5d2d253 100644 --- a/build/ninja_gen/src/bin/update_uv.rs +++ b/build/ninja_gen/src/bin/update_uv.rs @@ -53,7 +53,7 @@ fn fetch_uv_release_info() -> Result> { // Find the corresponding .sha256 or .sha256sum asset let sha_asset = assets.iter().find(|a| { let name = a["name"].as_str().unwrap_or(""); - name == format!("{}.sha256", asset_name) || name == format!("{}.sha256sum", asset_name) + name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum") }); if sha_asset.is_none() { eprintln!("No sha256 asset found for {asset_name}"); @@ -71,8 +71,7 @@ fn fetch_uv_release_info() -> Result> { let sha256 = sha_text.split_whitespace().next().unwrap_or(""); match_blocks.push(format!( - " Platform::{} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}", - platform, download_url, sha256 + " Platform::{platform} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}" )); } @@ -135,10 +134,7 @@ mod tests { assert_eq!( updated_lines, original_lines - EXPECTED_LINES_REMOVED, - "Expected line count to decrease by exactly {} lines (original: {}, updated: {})", - EXPECTED_LINES_REMOVED, - original_lines, - updated_lines + "Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})" ); } } diff --git 
a/build/ninja_gen/src/build.rs b/build/ninja_gen/src/build.rs index df8ec82fb..ed416b000 100644 --- a/build/ninja_gen/src/build.rs +++ b/build/ninja_gen/src/build.rs @@ -300,7 +300,7 @@ impl BuildStatement<'_> { writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap(); for (key, value) in self.variables.iter().sorted() { - writeln!(buf, " {key} = {}", value).unwrap(); + writeln!(buf, " {key} = {value}").unwrap(); } writeln!(buf).unwrap(); @@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> { let outputs = outputs.into_iter().map(|v| { let v = v.as_ref(); let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") { - format!("$builddir/{}", v) + format!("$builddir/{v}") } else { v.to_owned() }; diff --git a/build/ninja_gen/src/node.rs b/build/ninja_gen/src/node.rs index 10b3e6184..b7b66225b 100644 --- a/build/ninja_gen/src/node.rs +++ b/build/ninja_gen/src/node.rs @@ -19,28 +19,28 @@ use crate::input::BuildInput; pub fn node_archive(platform: Platform) -> OnlineArchive { match platform { Platform::LinuxX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz", - sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz", + sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12", }, Platform::LinuxArm => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz", - sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz", + sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18", }, Platform::MacX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz", - sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz", 
+ sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a", }, Platform::MacArm => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz", - sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz", + sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914", }, Platform::WindowsX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip", - sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip", + sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85", }, Platform::WindowsArm => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip", - sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip", + sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621", }, } } diff --git a/build/ninja_gen/src/python.rs b/build/ninja_gen/src/python.rs index 7ac65e85f..541d6c96e 100644 --- a/build/ninja_gen/src/python.rs +++ b/build/ninja_gen/src/python.rs @@ -148,7 +148,7 @@ impl BuildAction for PythonEnvironment { // Add --python flag to extra_args if PYTHON_BINARY is set let mut args = self.extra_args.to_string(); if let Ok(python_binary) = env::var("PYTHON_BINARY") { - args = format!("--python {} {}", python_binary, args); + args = format!("--python {python_binary} {args}"); } build.add_variable("extra_args", args); } @@ -159,6 +159,10 @@ impl BuildAction for PythonEnvironment { } build.add_output_stamp(format!("{}/.stamp", self.venv_folder)); } + + fn check_output_timestamps(&self) -> bool { + true + } } pub struct PythonTypecheck { @@ -189,31 +193,19 @@ impl BuildAction for PythonTypecheck { struct PythonFormat<'a> { pub inputs: &'a 
BuildInput, pub check_only: bool, - pub isort_ini: &'a BuildInput, } impl BuildAction for PythonFormat<'_> { fn command(&self) -> &str { - "$black -t py39 -q $check --color $in && $ - $isort --color --settings-path $isort_ini $check $in" + "$ruff format $mode $in && $ruff check --select I --fix $in" } fn files(&mut self, build: &mut impl crate::build::FilesHandle) { build.add_inputs("in", self.inputs); - build.add_inputs("black", inputs![":pyenv:black"]); - build.add_inputs("isort", inputs![":pyenv:isort"]); + build.add_inputs("ruff", inputs![":pyenv:ruff"]); let hash = simple_hash(self.inputs); - build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}"); - build.add_inputs("isort_ini", self.isort_ini); - build.add_variable( - "check", - if self.check_only { - "--diff --check" - } else { - "" - }, - ); + build.add_variable("mode", if self.check_only { "--check" } else { "" }); build.add_output_stamp(format!( "tests/python_format.{}.{hash}", @@ -223,13 +215,11 @@ impl BuildAction for PythonFormat<'_> { } pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> { - let isort_ini = &inputs![".isort.cfg"]; build.add_action( format!("check:format:python:{group}"), PythonFormat { inputs: &inputs, check_only: true, - isort_ini, }, )?; @@ -238,34 +228,39 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Resu PythonFormat { inputs: &inputs, check_only: false, - isort_ini, }, )?; Ok(()) } -pub struct PythonLint { +pub struct RuffCheck { pub folders: &'static [&'static str], - pub pylint_ini: BuildInput, pub deps: BuildInput, + pub check_only: bool, } -impl BuildAction for PythonLint { +impl BuildAction for RuffCheck { fn command(&self) -> &str { - "$pylint --rcfile $pylint_ini -sn -j $cpus $folders" + "$ruff check $folders $mode" } fn files(&mut self, build: &mut impl crate::build::FilesHandle) { build.add_inputs("", &self.deps); - build.add_inputs("pylint", inputs![":pyenv:pylint"]); - 
build.add_inputs("pylint_ini", &self.pylint_ini); + build.add_inputs("", inputs![".ruff.toml"]); + build.add_inputs("ruff", inputs![":pyenv:ruff"]); build.add_variable("folders", self.folders.join(" ")); - // On a 16 core system, values above 10 do not improve wall clock time, - // but waste extra cores that could be working on other tests. - build.add_variable("cpus", num_cpus::get().min(10).to_string()); + build.add_variable( + "mode", + if self.check_only { + "" + } else { + "--fix --unsafe-fixes" + }, + ); let hash = simple_hash(&self.deps); - build.add_output_stamp(format!("tests/python_lint.{hash}")); + let kind = if self.check_only { "check" } else { "fix" }; + build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}")); } } diff --git a/build/ninja_gen/src/render.rs b/build/ninja_gen/src/render.rs index a9a32cf84..dde307e73 100644 --- a/build/ninja_gen/src/render.rs +++ b/build/ninja_gen/src/render.rs @@ -30,12 +30,12 @@ impl Build { ) .unwrap(); for (key, value) in &self.variables { - writeln!(&mut buf, "{} = {}", key, value).unwrap(); + writeln!(&mut buf, "{key} = {value}").unwrap(); } buf.push('\n'); for (key, value) in &self.pools { - writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap(); + writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap(); } buf.push('\n'); diff --git a/build/runner/src/archive.rs b/build/runner/src/archive.rs index 8a78dd515..932b924e1 100644 --- a/build/runner/src/archive.rs +++ b/build/runner/src/archive.rs @@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String { let mut digest = sha2::Sha256::new(); digest.update(data); let result = digest.finalize(); - format!("{:x}", result) + format!("{result:x}") } enum CompressionKind { diff --git a/build/runner/src/build.rs b/build/runner/src/build.rs index 5e3042aba..107be9783 100644 --- a/build/runner/src/build.rs +++ b/build/runner/src/build.rs @@ -67,7 +67,10 @@ pub fn run_build(args: BuildArgs) { "MYPY_CACHE_DIR", 
build_root.join("tests").join("mypy").into_string(), ) - .env("PYTHONPYCACHEPREFIX", build_root.join("pycache")) + .env( + "PYTHONPYCACHEPREFIX", + std::path::absolute(build_root.join("pycache")).unwrap(), + ) // commands will not show colors by default, as we do not provide a tty .env("FORCE_COLOR", "1") .env("MYPY_FORCE_COLOR", "1") @@ -135,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf { true }; if create { - println!("Switching build root to {}", new_target); + println!("Switching build root to {new_target}"); std::os::unix::fs::symlink(new_target, build_root).unwrap(); } } diff --git a/build/runner/src/pyenv.rs b/build/runner/src/pyenv.rs index 0bd5ec662..d64c8fb3f 100644 --- a/build/runner/src/pyenv.rs +++ b/build/runner/src/pyenv.rs @@ -35,7 +35,7 @@ pub fn setup_pyenv(args: PyenvArgs) { run_command( Command::new(args.uv_bin) .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone()) - .args(["sync", "--frozen"]) + .args(["sync", "--locked"]) .args(args.extra_args), ); diff --git a/build/runner/src/run.rs b/build/runner/src/run.rs index bff88eb97..fd8877cd9 100644 --- a/build/runner/src/run.rs +++ b/build/runner/src/run.rs @@ -83,7 +83,7 @@ fn split_args(args: Vec) -> Vec> { pub fn run_command(command: &mut Command) { if let Err(err) = command.ensure_success() { - println!("{}", err); + println!("{err}"); std::process::exit(1); } } diff --git a/docs/development.md b/docs/development.md index c963aec02..defe9ef1e 100644 --- a/docs/development.md +++ b/docs/development.md @@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with ./ninja format ``` -## Fixing eslint/copyright header issues +## Fixing ruff/eslint/copyright header issues ``` ./ninja fix diff --git a/docs/protobuf.md b/docs/protobuf.md index 29094fc65..75796b473 100644 --- a/docs/protobuf.md +++ b/docs/protobuf.md @@ -98,12 +98,6 @@ should preferably be assigned a number between 1 and 15. 
If a message contains Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated). -- Every message used in aqt or pylib must be added to the respective `.pylintrc` - to avoid failing type checks. The unqualified protobuf message's name must be - used, not an alias from `collection.py` for example. This should be taken into - account when choosing a message name in order to prevent skipping typechecking - a Python class of the same name. - ### Typescript Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers diff --git a/ftl/core-repo b/ftl/core-repo index 2f8c9d956..a9216499b 160000 --- a/ftl/core-repo +++ b/ftl/core-repo @@ -1 +1 @@ -Subproject commit 2f8c9d9566aef8b86e3326fe9ff007d594b7ec83 +Subproject commit a9216499ba1fb1538cfd740c698adaaa3410fd4b diff --git a/ftl/core/card-templates.ftl b/ftl/core/card-templates.ftl index 7ecda1968..edb2433f9 100644 --- a/ftl/core/card-templates.ftl +++ b/ftl/core/card-templates.ftl @@ -60,7 +60,6 @@ card-templates-this-will-create-card-proceed = } card-templates-type-boxes-warning = Only one typing box per card template is supported. card-templates-restore-to-default = Restore to Default -card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default - values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed? +card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed? card-templates-restored-to-default = Note type has been restored to its original state. 
diff --git a/ftl/core/deck-config.ftl b/ftl/core/deck-config.ftl index 286e6bae8..a091dccef 100644 --- a/ftl/core/deck-config.ftl +++ b/ftl/core/deck-config.ftl @@ -425,6 +425,8 @@ deck-config-desired-retention-tooltip = less frequently, and you will forget more of them. Be conservative when adjusting this - higher values will greatly increase your workload, and lower values can be demoralizing when you forget a lot of material. +deck-config-desired-retention-tooltip2 = + The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator. deck-config-historical-retention-tooltip = When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will assume that when you did those old reviews, you remembered 90% of the material. If your old retention diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl index 70bc5f4d1..3b9f7c401 100644 --- a/ftl/core/importing.ftl +++ b/ftl/core/importing.ftl @@ -65,7 +65,6 @@ importing-with-deck-configs-help = If enabled, any deck options that the deck sharer included will also be imported. Otherwise, all decks will be assigned the default preset. importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip) -importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) # the '|' character importing-pipe = Pipe # Warning displayed when the csv import preview table is clipped (some columns were hidden) @@ -78,7 +77,6 @@ importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } field importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual. 
importing-semicolon = Semicolon importing-skipped = Skipped -importing-supermemo-xml-export-xml = Supermemo XML export (*.xml) importing-tab = Tab importing-tag-modified-notes = Tag modified notes: importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*) @@ -252,3 +250,5 @@ importing-importing-collection = Importing collection... importing-unable-to-import-filename = Unable to import { $filename }: file type not supported importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val } importing-added = Added +importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) +importing-supermemo-xml-export-xml = Supermemo XML export (*.xml) diff --git a/ftl/core/statistics.ftl b/ftl/core/statistics.ftl index c3a2bb613..8da1aace8 100644 --- a/ftl/core/statistics.ftl +++ b/ftl/core/statistics.ftl @@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning statistics-counts-title = Card Counts statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards -## True Retention represents your actual retention rate from past reviews, in -## comparison to the "desired retention" parameter of FSRS, which forecasts -## future retention. True Retention is the percentage of all reviewed cards +## Retention rate represents your actual retention rate from past reviews, in +## comparison to the "desired retention" setting of FSRS, which forecasts +## future retention. Retention rate is the percentage of all reviewed cards ## that were marked as "Hard," "Good," or "Easy" within a specific time period. ## ## Most of these strings are used as column / row headings in a table. @@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca ## N.B. Stats cards may be very small on mobile devices and when the Stats ## window is certain sizes. 
-statistics-true-retention-title = True Retention +statistics-true-retention-title = Retention rate statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day. -statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data. +statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data. statistics-true-retention-range = Range statistics-true-retention-pass = Pass statistics-true-retention-fail = Fail diff --git a/ftl/qt-repo b/ftl/qt-repo index 69f2dbaeb..a1134ab59 160000 --- a/ftl/qt-repo +++ b/ftl/qt-repo @@ -1 +1 @@ -Subproject commit 69f2dbaeba6f72ac62da0b35881f320603da5124 +Subproject commit a1134ab59d3d23468af2968741aa1f21d16ff308 diff --git a/ftl/qt/qt-accel.ftl b/ftl/qt/qt-accel.ftl index 327cd6c46..3ab54eb24 100644 --- a/ftl/qt/qt-accel.ftl +++ b/ftl/qt/qt-accel.ftl @@ -1,4 +1,5 @@ qt-accel-about = &About +qt-accel-about-mac = About Anki... qt-accel-cards = &Cards qt-accel-check-database = &Check Database qt-accel-check-media = Check &Media @@ -45,3 +46,4 @@ qt-accel-zoom-editor-in = Zoom Editor &In qt-accel-zoom-editor-out = Zoom Editor &Out qt-accel-create-backup = Create &Backup qt-accel-load-backup = &Revert to Backup +qt-accel-upgrade-downgrade = Upgrade/Downgrade diff --git a/ftl/qt/qt-misc.ftl b/ftl/qt/qt-misc.ftl index 60c22ef8b..d7bbef990 100644 --- a/ftl/qt/qt-misc.ftl +++ b/ftl/qt/qt-misc.ftl @@ -73,7 +73,7 @@ qt-misc-second = qt-misc-layout-auto-enabled = Responsive layout enabled qt-misc-layout-vertical-enabled = Vertical layout enabled qt-misc-layout-horizontal-enabled = Horizontal layout enabled -qt-misc-please-restart-to-update-anki = Please restart Anki to update to the latest version. 
+qt-misc-open-anki-launcher = Change to a different Anki version? ## deprecated- these strings will be removed in the future, and do not need ## to be translated diff --git a/ftl/src/serialize.rs b/ftl/src/serialize.rs index c6eda559c..73513df69 100644 --- a/ftl/src/serialize.rs +++ b/ftl/src/serialize.rs @@ -435,7 +435,7 @@ impl TextWriter { item = item.trim_start_matches(' '); } - write!(self.buffer, "{}", item) + write!(self.buffer, "{item}") } fn write_char_into_indent(&mut self, ch: char) { diff --git a/ftl/src/string/mod.rs b/ftl/src/string/mod.rs index 1b64dd91c..e7bf2c5bd 100644 --- a/ftl/src/string/mod.rs +++ b/ftl/src/string/mod.rs @@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option { fn all_langs(lang_folder: &Utf8Path) -> Result> { std::fs::read_dir(lang_folder) - .with_context(|| format!("reading {:?}", lang_folder))? + .with_context(|| format!("reading {lang_folder:?}"))? .filter_map(Result::ok) .map(|e| Ok(e.path().utf8()?)) .collect() diff --git a/package.json b/package.json index d08655bad..9f12133db 100644 --- a/package.json +++ b/package.json @@ -19,8 +19,8 @@ "@poppanator/sveltekit-svg": "^5.0.0", "@sqltools/formatter": "^1.2.2", "@sveltejs/adapter-static": "^3.0.0", - "@sveltejs/kit": "^2.20.7", - "@sveltejs/vite-plugin-svelte": "4.0.0", + "@sveltejs/kit": "^2.22.2", + "@sveltejs/vite-plugin-svelte": "5.1", "@types/bootstrap": "^5.0.12", "@types/codemirror": "^5.60.0", "@types/d3": "^7.0.0", @@ -30,7 +30,7 @@ "@types/jqueryui": "^1.12.13", "@types/lodash-es": "^4.17.4", "@types/marked": "^5.0.0", - "@types/node": "^20", + "@types/node": "^22", "@typescript-eslint/eslint-plugin": "^5.60.1", "@typescript-eslint/parser": "^5.60.1", "caniuse-lite": "^1.0.30001431", @@ -48,16 +48,16 @@ "prettier": "^3.4.2", "prettier-plugin-svelte": "^3.3.2", "sass": "<1.77", - "svelte": "^5.17.3", - "svelte-check": "^3.4.4", - "svelte-preprocess": "^5.0.4", + "svelte": "^5.34.9", + "svelte-check": "^4.2.2", + "svelte-preprocess": 
"^6.0.3", "svelte-preprocess-esbuild": "^3.0.1", "svgo": "^3.2.0", "tslib": "^2.0.3", - "tsx": "^3.12.0", + "tsx": "^4.8.1", "typescript": "^5.0.4", - "vite": "5.4.19", - "vitest": "^2" + "vite": "6", + "vitest": "^3" }, "dependencies": { "@bufbuild/protobuf": "^1.2.1", @@ -81,7 +81,8 @@ }, "resolutions": { "canvas": "npm:empty-npm-package@1.0.0", - "cookie": "0.7.0" + "cookie": "0.7.0", + "vite": "6" }, "browserslist": [ "defaults", diff --git a/proto/anki/config.proto b/proto/anki/config.proto index d61f139d6..ea115f0fc 100644 --- a/proto/anki/config.proto +++ b/proto/anki/config.proto @@ -56,6 +56,7 @@ message ConfigKey { RENDER_LATEX = 25; LOAD_BALANCER_ENABLED = 26; FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27; + FSRS_LEGACY_EVALUATE = 28; } enum String { SET_DUE_BROWSER = 0; diff --git a/proto/anki/deck_config.proto b/proto/anki/deck_config.proto index 831283931..9dae49c6a 100644 --- a/proto/anki/deck_config.proto +++ b/proto/anki/deck_config.proto @@ -236,6 +236,7 @@ message DeckConfigsForUpdate { bool new_cards_ignore_review_limit = 7; bool fsrs = 8; bool fsrs_health_check = 11; + bool fsrs_legacy_evaluate = 12; bool apply_all_parent_limits = 9; uint32 days_since_last_fsrs_optimize = 10; } diff --git a/proto/anki/scheduler.proto b/proto/anki/scheduler.proto index 1b7d44a83..01f092a39 100644 --- a/proto/anki/scheduler.proto +++ b/proto/anki/scheduler.proto @@ -56,6 +56,8 @@ service SchedulerService { rpc SimulateFsrsReview(SimulateFsrsReviewRequest) returns (SimulateFsrsReviewResponse); rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse); + rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest) + returns (EvaluateParamsResponse); rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse); // The number of days the calculated interval was fuzzed by on the previous // review (if any). Utilized by the FSRS add-on. 
@@ -442,6 +444,12 @@ message EvaluateParamsRequest { uint32 num_of_relearning_steps = 3; } +message EvaluateParamsLegacyRequest { + repeated float params = 1; + string search = 2; + int64 ignore_revlogs_before_ms = 3; +} + message EvaluateParamsResponse { float log_loss = 1; float rmse_bins = 2; diff --git a/pylib/anki/_backend.py b/pylib/anki/_backend.py index a0d8f8949..03fbb30d6 100644 --- a/pylib/anki/_backend.py +++ b/pylib/anki/_backend.py @@ -46,7 +46,6 @@ from .errors import ( # the following comment is required to suppress a warning that only shows up # when there are other pylint failures -# pylint: disable=c-extension-no-member if _rsbridge.buildhash() != anki.buildinfo.buildhash: raise Exception( f"""rsbridge and anki build hashes do not match: @@ -164,7 +163,7 @@ class RustBackend(RustBackendGenerated): finally: elapsed = time.time() - start if current_thread() is main_thread() and elapsed > 0.2: - print(f"blocked main thread for {int(elapsed*1000)}ms:") + print(f"blocked main thread for {int(elapsed * 1000)}ms:") print("".join(traceback.format_stack())) err = backend_pb2.BackendError() diff --git a/pylib/anki/cards.py b/pylib/anki/cards.py index 95e4ac7b9..854d4ed18 100644 --- a/pylib/anki/cards.py +++ b/pylib/anki/cards.py @@ -7,7 +7,7 @@ import pprint import time from typing import NewType -import anki # pylint: disable=unused-import +import anki import anki.collection import anki.decks import anki.notes diff --git a/pylib/anki/collection.py b/pylib/anki/collection.py index 6cf38174c..c64ffdb8b 100644 --- a/pylib/anki/collection.py +++ b/pylib/anki/collection.py @@ -158,7 +158,7 @@ class Collection(DeprecatedNamesMixin): self.tags = TagManager(self) self.conf = ConfigManager(self) self._load_scheduler() - self._startReps = 0 # pylint: disable=invalid-name + self._startReps = 0 def name(self) -> Any: return os.path.splitext(os.path.basename(self.path))[0] @@ -511,9 +511,7 @@ class Collection(DeprecatedNamesMixin): # Utils 
########################################################################## - def nextID( # pylint: disable=invalid-name - self, type: str, inc: bool = True - ) -> Any: + def nextID(self, type: str, inc: bool = True) -> Any: type = f"next{type.capitalize()}" id = self.conf.get(type, 1) if inc: @@ -849,7 +847,6 @@ class Collection(DeprecatedNamesMixin): ) def _pb_search_separator(self, operator: SearchJoiner) -> SearchNode.Group.Joiner.V: - # pylint: disable=no-member if operator == "AND": return SearchNode.Group.Joiner.AND else: @@ -867,7 +864,9 @@ class Collection(DeprecatedNamesMixin): return column return None - def browser_row_for_id(self, id_: int) -> tuple[ + def browser_row_for_id( + self, id_: int + ) -> tuple[ Generator[tuple[str, bool, BrowserRow.Cell.TextElideMode.V], None, None], BrowserRow.Color.V, str, @@ -1212,8 +1211,6 @@ class Collection(DeprecatedNamesMixin): # the count on things like edits, which we probably could do by checking # the previous state in moveToState. - # pylint: disable=invalid-name - def startTimebox(self) -> None: self._startTime = time.time() self._startReps = self.sched.reps diff --git a/pylib/anki/exporting.py b/pylib/anki/exporting.py index 43713d8b2..ef6f02c63 100644 --- a/pylib/anki/exporting.py +++ b/pylib/anki/exporting.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations @@ -351,7 +350,7 @@ class AnkiPackageExporter(AnkiExporter): colfile = path.replace(".apkg", ".anki2") AnkiExporter.exportInto(self, colfile) # prevent older clients from accessing - # pylint: disable=unreachable + self._addDummyCollection(z) z.write(colfile, "collection.anki21") diff --git a/pylib/anki/find.py b/pylib/anki/find.py index bcae6e556..106bf2876 100644 --- a/pylib/anki/find.py +++ b/pylib/anki/find.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, 
version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/hooks.py b/pylib/anki/hooks.py index fcc3758f4..13148c649 100644 --- a/pylib/anki/hooks.py +++ b/pylib/anki/hooks.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name """ Tools for extending Anki. diff --git a/pylib/anki/importing/__init__.py b/pylib/anki/importing/__init__.py index cfc2cac3f..d4fccc643 100644 --- a/pylib/anki/importing/__init__.py +++ b/pylib/anki/importing/__init__.py @@ -11,8 +11,6 @@ from anki.importing.apkg import AnkiPackageImporter from anki.importing.base import Importer from anki.importing.csvfile import TextImporter from anki.importing.mnemo import MnemosyneImporter -from anki.importing.pauker import PaukerImporter -from anki.importing.supermemo_xml import SupermemoXmlImporter # type: ignore from anki.lang import TR @@ -24,8 +22,6 @@ def importers(col: Collection) -> Sequence[tuple[str, type[Importer]]]: AnkiPackageImporter, ), (col.tr.importing_mnemosyne_20_deck_db(), MnemosyneImporter), - (col.tr.importing_supermemo_xml_export_xml(), SupermemoXmlImporter), - (col.tr.importing_pauker_18_lesson_paugz(), PaukerImporter), ] anki.hooks.importing_importers(importers) return importers diff --git a/pylib/anki/importing/anki2.py b/pylib/anki/importing/anki2.py index 098265c3f..dcfa15c8d 100644 --- a/pylib/anki/importing/anki2.py +++ b/pylib/anki/importing/anki2.py @@ -1,7 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations import os diff --git a/pylib/anki/importing/apkg.py b/pylib/anki/importing/apkg.py index ea2325960..012686ffa 100644 --- a/pylib/anki/importing/apkg.py +++ b/pylib/anki/importing/apkg.py @@ -1,7 +1,7 @@ # 
Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations import json diff --git a/pylib/anki/importing/base.py b/pylib/anki/importing/base.py index 2ddcaaebf..fc27dc909 100644 --- a/pylib/anki/importing/base.py +++ b/pylib/anki/importing/base.py @@ -1,7 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations from typing import Any diff --git a/pylib/anki/importing/csvfile.py b/pylib/anki/importing/csvfile.py index 6a2ed347c..fde7ec8ac 100644 --- a/pylib/anki/importing/csvfile.py +++ b/pylib/anki/importing/csvfile.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations @@ -144,7 +143,6 @@ class TextImporter(NoteImporter): self.close() zuper = super() if hasattr(zuper, "__del__"): - # pylint: disable=no-member zuper.__del__(self) # type: ignore def noteFromFields(self, fields: list[str]) -> ForeignNote: diff --git a/pylib/anki/importing/mnemo.py b/pylib/anki/importing/mnemo.py index 5b7fda65f..a2f68ad4c 100644 --- a/pylib/anki/importing/mnemo.py +++ b/pylib/anki/importing/mnemo.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name import re import time @@ -35,7 +34,6 @@ f._id=d._fact_id""" ): if id != curid: if note: - # pylint: disable=unsubscriptable-object notes[note["_id"]] = note note = {"_id": _id} curid = id @@ -185,7 +183,6 @@ acq_reps+ret_reps, lapses, card_type_id from cards""" state = dict(n=1) def repl(match): - # pylint: disable=cell-var-from-loop # replace [...] 
with cloze refs res = "{{c%d::%s}}" % (state["n"], match.group(1)) state["n"] += 1 diff --git a/pylib/anki/importing/noteimp.py b/pylib/anki/importing/noteimp.py index f827a525a..cb35a373a 100644 --- a/pylib/anki/importing/noteimp.py +++ b/pylib/anki/importing/noteimp.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/importing/pauker.py b/pylib/anki/importing/pauker.py deleted file mode 100644 index ea5c45082..000000000 --- a/pylib/anki/importing/pauker.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright: Andreas Klauer -# License: BSD-3 - -# pylint: disable=invalid-name - -import gzip -import html -import math -import random -import time -import xml.etree.ElementTree as ET - -from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter -from anki.stdmodels import _legacy_add_forward_reverse - -ONE_DAY = 60 * 60 * 24 - - -class PaukerImporter(NoteImporter): - """Import Pauker 1.8 Lesson (*.pau.gz)""" - - needMapper = False - allowHTML = True - - def run(self): - model = _legacy_add_forward_reverse(self.col) - model["name"] = "Pauker" - self.col.models.save(model, updateReqs=False) - self.col.models.set_current(model) - self.model = model - self.initMapping() - NoteImporter.run(self) - - def fields(self): - """Pauker is Front/Back""" - return 2 - - def foreignNotes(self): - """Build and return a list of notes.""" - notes = [] - - try: - f = gzip.open(self.file) - tree = ET.parse(f) # type: ignore - lesson = tree.getroot() - assert lesson.tag == "Lesson" - finally: - f.close() - - index = -4 - - for batch in lesson.findall("./Batch"): - index += 1 - - for card in batch.findall("./Card"): - # Create a note for this card. 
- front = card.findtext("./FrontSide/Text") - back = card.findtext("./ReverseSide/Text") - note = ForeignNote() - assert front and back - note.fields = [ - html.escape(x.strip()) - .replace("\n", "
") - .replace(" ", "  ") - for x in [front, back] - ] - notes.append(note) - - # Determine due date for cards. - frontdue = card.find("./FrontSide[@LearnedTimestamp]") - backdue = card.find("./ReverseSide[@Batch][@LearnedTimestamp]") - - if frontdue is not None: - note.cards[0] = self._learnedCard( - index, int(frontdue.attrib["LearnedTimestamp"]) - ) - - if backdue is not None: - note.cards[1] = self._learnedCard( - int(backdue.attrib["Batch"]), - int(backdue.attrib["LearnedTimestamp"]), - ) - - return notes - - def _learnedCard(self, batch, timestamp): - ivl = math.exp(batch) - now = time.time() - due = ivl - (now - timestamp / 1000.0) / ONE_DAY - fc = ForeignCard() - fc.due = self.col.sched.today + int(due + 0.5) - fc.ivl = random.randint(int(ivl * 0.90), int(ivl + 0.5)) - fc.factor = random.randint(1500, 2500) - return fc diff --git a/pylib/anki/importing/supermemo_xml.py b/pylib/anki/importing/supermemo_xml.py deleted file mode 100644 index 202592c2e..000000000 --- a/pylib/anki/importing/supermemo_xml.py +++ /dev/null @@ -1,484 +0,0 @@ -# Copyright: petr.michalec@gmail.com -# License: GNU GPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pytype: disable=attribute-error -# type: ignore -# pylint: disable=C -from __future__ import annotations - -import re -import sys -import time -import unicodedata -from string import capwords -from xml.dom import minidom -from xml.dom.minidom import Element, Text - -from anki.collection import Collection -from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter -from anki.stdmodels import _legacy_add_basic_model - - -class SmartDict(dict): - """ - See http://www.peterbe.com/plog/SmartDict - Copyright 2005, Peter Bengtsson, peter@fry-it.com - 0BSD - - A smart dict can be instantiated either from a pythonic dict - or an instance object (eg. SQL recordsets) but it ensures that you can - do all the convenient lookups such as x.first_name, x['first_name'] or - x.get('first_name'). 
- """ - - def __init__(self, *a, **kw) -> None: - if a: - if isinstance(type(a[0]), dict): - kw.update(a[0]) - elif isinstance(type(a[0]), object): - kw.update(a[0].__dict__) - elif hasattr(a[0], "__class__") and a[0].__class__.__name__ == "SmartDict": - kw.update(a[0].__dict__) - - dict.__init__(self, **kw) - self.__dict__ = self - - -class SuperMemoElement(SmartDict): - "SmartDict wrapper to store SM Element data" - - def __init__(self, *a, **kw) -> None: - SmartDict.__init__(self, *a, **kw) - # default content - self.__dict__["lTitle"] = None - self.__dict__["Title"] = None - self.__dict__["Question"] = None - self.__dict__["Answer"] = None - self.__dict__["Count"] = None - self.__dict__["Type"] = None - self.__dict__["ID"] = None - self.__dict__["Interval"] = None - self.__dict__["Lapses"] = None - self.__dict__["Repetitions"] = None - self.__dict__["LastRepetiton"] = None - self.__dict__["AFactor"] = None - self.__dict__["UFactor"] = None - - -# This is an AnkiImporter -class SupermemoXmlImporter(NoteImporter): - needMapper = False - allowHTML = True - - """ - Supermemo XML export's to Anki parser. - Goes through a SM collection and fetch all elements. - - My SM collection was a big mess where topics and items were mixed. - I was unable to parse my content in a regular way like for loop on - minidom.getElementsByTagName() etc. My collection had also an - limitation, topics were splited into branches with max 100 items - on each. Learning themes were in deep structure. I wanted to have - full title on each element to be stored in tags. - - Code should be upgrade to support importing of SM2006 exports. - """ - - def __init__(self, col: Collection, file: str) -> None: - """Initialize internal variables. 
- Pameters to be exposed to GUI are stored in self.META""" - NoteImporter.__init__(self, col, file) - m = _legacy_add_basic_model(self.col) - m["name"] = "Supermemo" - self.col.models.save(m) - self.initMapping() - - self.lines = None - self.numFields = int(2) - - # SmXmlParse VARIABLES - self.xmldoc = None - self.pieces = [] - self.cntBuf = [] # to store last parsed data - self.cntElm = [] # to store SM Elements data - self.cntCol = [] # to store SM Colections data - - # store some meta info related to parse algorithm - # SmartDict works like dict / class wrapper - self.cntMeta = SmartDict() - self.cntMeta.popTitles = False - self.cntMeta.title = [] - - # META stores controls of import script, should be - # exposed to import dialog. These are default values. - self.META = SmartDict() - self.META.resetLearningData = False # implemented - self.META.onlyMemorizedItems = False # implemented - self.META.loggerLevel = 2 # implemented 0no,1info,2error,3debug - self.META.tagAllTopics = True - self.META.pathsToBeTagged = [ - "English for beginners", - "Advanced English 97", - "Phrasal Verbs", - ] # path patterns to be tagged - in gui entered like 'Advanced English 97|My Vocablary' - self.META.tagMemorizedItems = True # implemented - self.META.logToStdOutput = False # implemented - - self.notes = [] - - ## TOOLS - - def _fudgeText(self, text: str) -> str: - "Replace sm syntax to Anki syntax" - text = text.replace("\n\r", "
") - text = text.replace("\n", "
") - return text - - def _unicode2ascii(self, str: str) -> str: - "Remove diacritic punctuation from strings (titles)" - return "".join( - [ - c - for c in unicodedata.normalize("NFKD", str) - if not unicodedata.combining(c) - ] - ) - - def _decode_htmlescapes(self, html: str) -> str: - """Unescape HTML code.""" - # In case of bad formatted html you can import MinimalSoup etc.. see BeautifulSoup source code - from bs4 import BeautifulSoup - - # my sm2004 also ecaped & char in escaped sequences. - html = re.sub("&", "&", html) - - # https://anki.tenderapp.com/discussions/ankidesktop/39543-anki-is-replacing-the-character-by-when-i-exit-the-html-edit-mode-ctrlshiftx - if html.find(">") < 0: - return html - - # unescaped solitary chars < or > that were ok for minidom confuse btfl soup - # html = re.sub(u'>',u'>',html) - # html = re.sub(u'<',u'<',html) - - return str(BeautifulSoup(html, "html.parser")) - - def _afactor2efactor(self, af: float) -> float: - # Adapted from - - # Ranges for A-factors and E-factors - af_min = 1.2 - af_max = 6.9 - ef_min = 1.3 - ef_max = 3.3 - - # Sanity checks for the A-factor - if af < af_min: - af = af_min - elif af > af_max: - af = af_max - - # Scale af to the range 0..1 - af_scaled = (af - af_min) / (af_max - af_min) - # Rescale to the interval ef_min..ef_max - ef = ef_min + af_scaled * (ef_max - ef_min) - - return ef - - ## DEFAULT IMPORTER METHODS - - def foreignNotes(self) -> list[ForeignNote]: - # Load file and parse it by minidom - self.loadSource(self.file) - - # Migrating content / time consuming part - # addItemToCards is called for each sm element - self.logger("Parsing started.") - self.parse() - self.logger("Parsing done.") - - # Return imported cards - self.total = len(self.notes) - self.log.append("%d cards imported." 
% self.total) - return self.notes - - def fields(self) -> int: - return 2 - - ## PARSER METHODS - - def addItemToCards(self, item: SuperMemoElement) -> None: - "This method actually do conversion" - - # new anki card - note = ForeignNote() - - # clean Q and A - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Question))) - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Answer))) - note.tags = [] - - # pre-process scheduling data - # convert learning data - if ( - not self.META.resetLearningData - and int(item.Interval) >= 1 - and getattr(item, "LastRepetition", None) - ): - # migration of LearningData algorithm - tLastrep = time.mktime(time.strptime(item.LastRepetition, "%d.%m.%Y")) - tToday = time.time() - card = ForeignCard() - card.ivl = int(item.Interval) - card.lapses = int(item.Lapses) - card.reps = int(item.Repetitions) + int(item.Lapses) - nextDue = tLastrep + (float(item.Interval) * 86400.0) - remDays = int((nextDue - time.time()) / 86400) - card.due = self.col.sched.today + remDays - card.factor = int( - self._afactor2efactor(float(item.AFactor.replace(",", "."))) * 1000 - ) - note.cards[0] = card - - # categories & tags - # it's worth to have every theme (tree structure of sm collection) stored in tags, but sometimes not - # you can deceide if you are going to tag all toppics or just that containing some pattern - tTaggTitle = False - for pattern in self.META.pathsToBeTagged: - if ( - item.lTitle is not None - and pattern.lower() in " ".join(item.lTitle).lower() - ): - tTaggTitle = True - break - if tTaggTitle or self.META.tagAllTopics: - # normalize - remove diacritic punctuation from unicode chars to ascii - item.lTitle = [self._unicode2ascii(topic) for topic in item.lTitle] - - # Transform xyz / aaa / bbb / ccc on Title path to Tag xyzAaaBbbCcc - # clean things like [999] or [111-2222] from title path, example: xyz / [1000-1200] zyx / xyz - # clean whitespaces - # set Capital letters for first char of the word - tmp = 
list( - {re.sub(r"(\[[0-9]+\])", " ", i).replace("_", " ") for i in item.lTitle} - ) - tmp = list({re.sub(r"(\W)", " ", i) for i in tmp}) - tmp = list({re.sub("^[0-9 ]+$", "", i) for i in tmp}) - tmp = list({capwords(i).replace(" ", "") for i in tmp}) - tags = [j[0].lower() + j[1:] for j in tmp if j.strip() != ""] - - note.tags += tags - - if self.META.tagMemorizedItems and int(item.Interval) > 0: - note.tags.append("Memorized") - - self.logger("Element tags\t- " + repr(note.tags), level=3) - - self.notes.append(note) - - def logger(self, text: str, level: int = 1) -> None: - "Wrapper for Anki logger" - - dLevels = {0: "", 1: "Info", 2: "Verbose", 3: "Debug"} - if level <= self.META.loggerLevel: - # self.deck.updateProgress(_(text)) - - if self.META.logToStdOutput: - print( - self.__class__.__name__ - + " - " - + dLevels[level].ljust(9) - + " -\t" - + text - ) - - # OPEN AND LOAD - def openAnything(self, source): - """Open any source / actually only opening of files is used - @return an open handle which must be closed after use, i.e., handle.close()""" - - if source == "-": - return sys.stdin - - # try to open with urllib (if source is http, ftp, or file URL) - import urllib.error - import urllib.parse - import urllib.request - - try: - return urllib.request.urlopen(source) - except OSError: - pass - - # try to open with native open function (if source is pathname) - try: - return open(source, encoding="utf8") - except OSError: - pass - - # treat source as string - import io - - return io.StringIO(str(source)) - - def loadSource(self, source: str) -> None: - """Load source file and parse with xml.dom.minidom""" - self.source = source - self.logger("Load started...") - sock = open(self.source, encoding="utf8") - self.xmldoc = minidom.parse(sock).documentElement - sock.close() - self.logger("Load done.") - - # PARSE - def parse(self, node: Text | Element | None = None) -> None: - "Parse method - parses document elements" - - if node is None and self.xmldoc is not 
None: - node = self.xmldoc - - _method = "parse_%s" % node.__class__.__name__ - if hasattr(self, _method): - parseMethod = getattr(self, _method) - parseMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - - def parse_Document(self, node): - "Parse XML document" - - self.parse(node.documentElement) - - def parse_Element(self, node: Element) -> None: - "Parse XML element" - - _method = "do_%s" % node.tagName - if hasattr(self, _method): - handlerMethod = getattr(self, _method) - handlerMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - # print traceback.print_exc() - - def parse_Text(self, node: Text) -> None: - "Parse text inside elements. Text is stored into local buffer." - - text = node.data - self.cntBuf.append(text) - - # def parse_Comment(self, node): - # """ - # Source can contain XML comments, but we ignore them - # """ - # pass - - # DO - def do_SuperMemoCollection(self, node: Element) -> None: - "Process SM Collection" - - for child in node.childNodes: - self.parse(child) - - def do_SuperMemoElement(self, node: Element) -> None: - "Process SM Element (Type - Title,Topics)" - - self.logger("=" * 45, level=3) - - self.cntElm.append(SuperMemoElement()) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - - # parse all child elements - for child in node.childNodes: - self.parse(child) - - # strip all saved strings, just for sure - for key in list(self.cntElm[-1].keys()): - if hasattr(self.cntElm[-1][key], "strip"): - self.cntElm[-1][key] = self.cntElm[-1][key].strip() - - # pop current element - smel = self.cntElm.pop() - - # Process cntElm if is valid Item (and not an Topic etc..) 
- # if smel.Lapses != None and smel.Interval != None and smel.Question != None and smel.Answer != None: - if smel.Title is None and smel.Question is not None and smel.Answer is not None: - if smel.Answer.strip() != "" and smel.Question.strip() != "": - # migrate only memorized otherway skip/continue - if self.META.onlyMemorizedItems and not (int(smel.Interval) > 0): - self.logger("Element skipped \t- not memorized ...", level=3) - else: - # import sm element data to Anki - self.addItemToCards(smel) - self.logger("Import element \t- " + smel["Question"], level=3) - - # print element - self.logger("-" * 45, level=3) - for key in list(smel.keys()): - self.logger( - "\t{} {}".format((key + ":").ljust(15), smel[key]), level=3 - ) - else: - self.logger("Element skipped \t- no valid Q and A ...", level=3) - - else: - # now we know that item was topic - # parsing of whole node is now finished - - # test if it's really topic - if smel.Title is not None: - # remove topic from title list - t = self.cntMeta["title"].pop() - self.logger("End of topic \t- %s" % (t), level=2) - - def do_Content(self, node: Element) -> None: - "Process SM element Content" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - def do_LearningData(self, node: Element) -> None: - "Process SM element LearningData" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - # It's being processed in do_Content now - # def do_Question(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - # It's being processed in do_Content now - # def do_Answer(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - def do_Title(self, node: Element) -> None: - "Process SM element Title" - 
- t = self._decode_htmlescapes(node.firstChild.data) - self.cntElm[-1][node.tagName] = t - self.cntMeta["title"].append(t) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - self.logger("Start of topic \t- " + " / ".join(self.cntMeta["title"]), level=2) - - def do_Type(self, node: Element) -> None: - "Process SM element Type" - - if len(self.cntBuf) >= 1: - self.cntElm[-1][node.tagName] = self.cntBuf.pop() - - -# if __name__ == '__main__': - -# for testing you can start it standalone - -# file = u'/home/epcim/hg2g/dev/python/sm2anki/ADVENG2EXP.xxe.esc.zaloha_FINAL.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_noOEM.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_oem_1250.xml' -# file = str(sys.argv[1]) -# impo = SupermemoXmlImporter(Deck(),file) -# impo.foreignCards() - -# sys.exit(1) - -# vim: ts=4 sts=2 ft=python diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py index a0a6bf757..3cbb60319 100644 --- a/pylib/anki/lang.py +++ b/pylib/anki/lang.py @@ -157,13 +157,13 @@ def lang_to_disk_lang(lang: str) -> str: # the currently set interface language -current_lang = "en" # pylint: disable=invalid-name +current_lang = "en" # the current Fluent translation instance. Code in pylib/ should # not reference this, and should use col.tr instead. The global # instance exists for legacy reasons, and as a convenience for the # Qt code. 
-current_i18n: anki._backend.RustBackend | None = None # pylint: disable=invalid-name +current_i18n: anki._backend.RustBackend | None = None tr_legacyglobal = anki._backend.Translations(None) @@ -178,7 +178,7 @@ def ngettext(single: str, plural: str, num: int) -> str: def set_lang(lang: str) -> None: - global current_lang, current_i18n # pylint: disable=invalid-name + global current_lang, current_i18n current_lang = lang current_i18n = anki._backend.RustBackend(langs=[lang]) tr_legacyglobal.backend = weakref.ref(current_i18n) @@ -198,9 +198,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]: # getdefaultlocale() is deprecated since Python 3.11, but we need to keep using it as getlocale() behaves differently: https://bugs.python.org/issue38805 with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) - (sys_lang, enc) = ( - locale.getdefaultlocale() # pylint: disable=deprecated-method - ) + (sys_lang, enc) = locale.getdefaultlocale() except AttributeError: # this will return a different format on Windows (e.g. Italian_Italy), resulting in us falling back to en_US # further below diff --git a/pylib/anki/models.py b/pylib/anki/models.py index 230084359..a2267663a 100644 --- a/pylib/anki/models.py +++ b/pylib/anki/models.py @@ -10,7 +10,7 @@ import time from collections.abc import Sequence from typing import Any, NewType, Union -import anki # pylint: disable=unused-import +import anki import anki.collection import anki.notes from anki import notetypes_pb2 @@ -419,7 +419,7 @@ and notes.mid = ? and cards.ord = ?""", # legacy API - used by unit tests and add-ons - def change( # pylint: disable=invalid-name + def change( self, notetype: NotetypeDict, nids: list[anki.notes.NoteId], @@ -478,8 +478,6 @@ and notes.mid = ? 
and cards.ord = ?""", # Legacy ########################################################################## - # pylint: disable=invalid-name - @deprecated(info="use note.cloze_numbers_in_fields()") def _availClozeOrds( self, notetype: NotetypeDict, flds: str, allow_empty: bool = True diff --git a/pylib/anki/notes.py b/pylib/anki/notes.py index 5de95bfb6..3d09d5632 100644 --- a/pylib/anki/notes.py +++ b/pylib/anki/notes.py @@ -7,7 +7,7 @@ import copy from collections.abc import Sequence from typing import NewType -import anki # pylint: disable=unused-import +import anki import anki.cards import anki.collection import anki.decks diff --git a/pylib/anki/rsbackend.py b/pylib/anki/rsbackend.py index 297c1b6d2..093712fca 100644 --- a/pylib/anki/rsbackend.py +++ b/pylib/anki/rsbackend.py @@ -4,10 +4,8 @@ # The backend code has moved into _backend; this file exists only to avoid breaking # some add-ons. They should be updated to point to the correct location in the # future. -# -# pylint: disable=unused-import -# pylint: enable=invalid-name +# ruff: noqa: F401 from anki.decks import DeckTreeNode from anki.errors import InvalidInput, NotFoundError from anki.lang import TR diff --git a/pylib/anki/scheduler/dummy.py b/pylib/anki/scheduler/dummy.py index 5732ad346..08896b1e5 100644 --- a/pylib/anki/scheduler/dummy.py +++ b/pylib/anki/scheduler/dummy.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/scheduler/legacy.py b/pylib/anki/scheduler/legacy.py index 58bed7933..35092588d 100644 --- a/pylib/anki/scheduler/legacy.py +++ b/pylib/anki/scheduler/legacy.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git 
a/pylib/anki/scheduler/v3.py b/pylib/anki/scheduler/v3.py index 2a18ee021..3c1123d0b 100644 --- a/pylib/anki/scheduler/v3.py +++ b/pylib/anki/scheduler/v3.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name """ The V3/2021 scheduler. @@ -184,7 +183,7 @@ class Scheduler(SchedulerBaseWithLegacy): return self._interval_for_filtered_state(state.filtered) else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def _interval_for_normal_state( self, normal: scheduler_pb2.SchedulingState.Normal @@ -200,7 +199,7 @@ class Scheduler(SchedulerBaseWithLegacy): return normal.relearning.learning.scheduled_secs else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def _interval_for_filtered_state( self, filtered: scheduler_pb2.SchedulingState.Filtered @@ -212,7 +211,7 @@ class Scheduler(SchedulerBaseWithLegacy): return self._interval_for_normal_state(filtered.rescheduling.original_state) else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def nextIvl(self, card: Card, ease: int) -> Any: "Don't use this - it is only required by tests, and will be moved in the future." diff --git a/pylib/anki/stats.py b/pylib/anki/stats.py index e6ca1cb97..e8045decb 100644 --- a/pylib/anki/stats.py +++ b/pylib/anki/stats.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=C from __future__ import annotations @@ -27,7 +26,7 @@ def _legacy_card_stats( col: anki.collection.Collection, card_id: anki.cards.CardId, include_revlog: bool ) -> str: "A quick hack to preserve compatibility with the old HTML string API." 
- random_id = f"cardinfo-{base62(random.randint(0, 2 ** 64 - 1))}" + random_id = f"cardinfo-{base62(random.randint(0, 2**64 - 1))}" varName = random_id.replace("-", "") return f"""
@@ -324,7 +323,6 @@ group by day order by day""" yaxes=[dict(min=0), dict(position="right", min=0)], ) if days is not None: - # pylint: disable=invalid-unary-operand-type conf["xaxis"]["min"] = -days + 0.5 def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str: @@ -359,7 +357,6 @@ group by day order by day""" yaxes=[dict(min=0), dict(position="right", min=0)], ) if days is not None: - # pylint: disable=invalid-unary-operand-type conf["xaxis"]["min"] = -days + 0.5 def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str: diff --git a/pylib/anki/statsbg.py b/pylib/anki/statsbg.py index 552dfb5a9..b9ebb5aa8 100644 --- a/pylib/anki/statsbg.py +++ b/pylib/anki/statsbg.py @@ -1,5 +1,3 @@ -# pylint: disable=invalid-name - # from subtlepatterns.com; CC BY 4.0. # by Daniel Beaton # https://www.toptal.com/designers/subtlepatterns/fancy-deboss/ diff --git a/pylib/anki/stdmodels.py b/pylib/anki/stdmodels.py index 721b96bc6..4edb83a7a 100644 --- a/pylib/anki/stdmodels.py +++ b/pylib/anki/stdmodels.py @@ -12,7 +12,6 @@ from anki import notetypes_pb2 from anki._legacy import DeprecatedNamesMixinForModule from anki.utils import from_json_bytes -# pylint: disable=no-member StockNotetypeKind = notetypes_pb2.StockNotetype.Kind # add-on authors can add ("note type name", function) diff --git a/pylib/anki/tags.py b/pylib/anki/tags.py index a54aa7901..0c0338b82 100644 --- a/pylib/anki/tags.py +++ b/pylib/anki/tags.py @@ -16,7 +16,7 @@ import re from collections.abc import Collection, Sequence from typing import Match -import anki # pylint: disable=unused-import +import anki import anki.collection from anki import tags_pb2 from anki._legacy import DeprecatedNamesMixin, deprecated diff --git a/pylib/anki/utils.py b/pylib/anki/utils.py index c61fd0588..60ae75507 100644 --- a/pylib/anki/utils.py +++ b/pylib/anki/utils.py @@ -24,7 +24,6 @@ from anki.dbproxy import DBProxy _tmpdir: str | None try: - # pylint: disable=c-extension-no-member import orjson to_json_bytes: 
Callable[[Any], bytes] = orjson.dumps @@ -156,12 +155,12 @@ def field_checksum(data: str) -> int: # Temp files ############################################################################## -_tmpdir = None # pylint: disable=invalid-name +_tmpdir = None def tmpdir() -> str: "A reusable temp folder which we clean out on each program invocation." - global _tmpdir # pylint: disable=invalid-name + global _tmpdir if not _tmpdir: def cleanup() -> None: @@ -216,7 +215,6 @@ def call(argv: list[str], wait: bool = True, **kwargs: Any) -> int: try: info.dwFlags |= subprocess.STARTF_USESHOWWINDOW # type: ignore except Exception: - # pylint: disable=no-member info.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW # type: ignore else: info = None @@ -282,7 +280,7 @@ def plat_desc() -> str: elif is_win: theos = f"win:{platform.win32_ver()[0]}" elif system == "Linux": - import distro # pytype: disable=import-error # pylint: disable=import-error + import distro # pytype: disable=import-error dist_id = distro.id() dist_version = distro.version() diff --git a/pylib/hatch_build.py b/pylib/hatch_build.py index c3539da56..9e8ee9799 100644 --- a/pylib/hatch_build.py +++ b/pylib/hatch_build.py @@ -35,8 +35,16 @@ class CustomBuildHook(BuildHookInterface): assert generated_root.exists(), "you should build with --wheel" for path in generated_root.rglob("*"): - if path.is_file(): + if path.is_file() and not self._should_exclude(path): relative_path = path.relative_to(generated_root) # Place files under anki/ in the distribution dist_path = "anki" / relative_path force_include[str(path)] = str(dist_path) + + def _should_exclude(self, path: Path) -> bool: + """Check if a file should be excluded from the wheel.""" + # Exclude __pycache__ + path_str = str(path) + if "/__pycache__/" in path_str: + return True + return False diff --git a/pylib/pyproject.toml b/pylib/pyproject.toml index 555f30c86..23e10077f 100644 --- a/pylib/pyproject.toml +++ b/pylib/pyproject.toml @@ -4,19 +4,15 @@ dynamic = 
["version"] requires-python = ">=3.9" license = "AGPL-3.0-or-later" dependencies = [ - "beautifulsoup4", "decorator", "markdown", "orjson", "protobuf>=4.21", "requests[socks]", + # remove after we update to min python 3.11+ "typing_extensions", - "types-protobuf", - "types-requests", - "types-orjson", # platform-specific dependencies "distro; sys_platform != 'darwin' and sys_platform != 'win32'", - "psutil; sys_platform == 'win32'", ] [build-system] diff --git a/pylib/rsbridge/build.rs b/pylib/rsbridge/build.rs index 2940563cb..4671bc296 100644 --- a/pylib/rsbridge/build.rs +++ b/pylib/rsbridge/build.rs @@ -28,6 +28,6 @@ fn main() { .to_string(); let libs_path = stdlib_path + "s"; - println!("cargo:rustc-link-search={}", libs_path); + println!("cargo:rustc-link-search={libs_path}"); } } diff --git a/pylib/tests/test_find.py b/pylib/tests/test_find.py index d9c2c1f87..236096572 100644 --- a/pylib/tests/test_find.py +++ b/pylib/tests/test_find.py @@ -169,8 +169,7 @@ def test_find_cards(): # properties id = col.db.scalar("select id from cards limit 1") col.db.execute( - "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 " - "where id = ?", + "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 where id = ?", id, ) assert len(col.find_cards("prop:ivl>5")) == 1 diff --git a/pylib/tests/test_importing.py b/pylib/tests/test_importing.py index 191de51f4..b7b63de26 100644 --- a/pylib/tests/test_importing.py +++ b/pylib/tests/test_importing.py @@ -13,7 +13,6 @@ from anki.importing import ( Anki2Importer, AnkiPackageImporter, MnemosyneImporter, - SupermemoXmlImporter, TextImporter, ) from tests.shared import getEmptyCol, getUpgradeDeckPath @@ -306,22 +305,6 @@ def test_csv_tag_only_if_modified(): col.close() -@pytest.mark.filterwarnings("ignore:Using or importing the ABCs") -def test_supermemo_xml_01_unicode(): - col = getEmptyCol() - file = str(os.path.join(testDir, "support", "supermemo1.xml")) - i = SupermemoXmlImporter(col, file) - # 
i.META.logToStdOutput = True - i.run() - assert i.total == 1 - cid = col.db.scalar("select id from cards") - c = col.get_card(cid) - # Applies A Factor-to-E Factor conversion - assert c.factor == 2879 - assert c.reps == 7 - col.close() - - def test_mnemo(): col = getEmptyCol() file = str(os.path.join(testDir, "support", "mnemo.db")) diff --git a/pylib/tests/test_schedv3.py b/pylib/tests/test_schedv3.py index 0deff7bf9..a71fa7140 100644 --- a/pylib/tests/test_schedv3.py +++ b/pylib/tests/test_schedv3.py @@ -551,12 +551,10 @@ def test_bury(): col.addNote(note) c2 = note.cards()[0] # burying - col.sched.bury_cards([c.id], manual=True) # pylint: disable=unexpected-keyword-arg + col.sched.bury_cards([c.id], manual=True) c.load() assert c.queue == QUEUE_TYPE_MANUALLY_BURIED - col.sched.bury_cards( - [c2.id], manual=False - ) # pylint: disable=unexpected-keyword-arg + col.sched.bury_cards([c2.id], manual=False) c2.load() assert c2.queue == QUEUE_TYPE_SIBLING_BURIED diff --git a/pylib/tools/genbuildinfo.py b/pylib/tools/genbuildinfo.py index b997ca5b3..add188d41 100644 --- a/pylib/tools/genbuildinfo.py +++ b/pylib/tools/genbuildinfo.py @@ -15,6 +15,5 @@ with open(buildhash_file, "r", encoding="utf8") as f: with open(outpath, "w", encoding="utf8") as f: # if we switch to uppercase we'll need to add legacy aliases - f.write("# pylint: disable=invalid-name\n") f.write(f"version = '{version}'\n") f.write(f"buildhash = '{buildhash}'\n") diff --git a/pylib/tools/genhooks.py b/pylib/tools/genhooks.py index e0e4924be..3644e3e95 100644 --- a/pylib/tools/genhooks.py +++ b/pylib/tools/genhooks.py @@ -133,7 +133,7 @@ prefix = """\ # This file is automatically generated; edit tools/genhooks.py instead. # Please import from anki.hooks instead of this file. 
-# pylint: disable=unused-import +# ruff: noqa: F401 from __future__ import annotations diff --git a/pylib/tools/hookslib.py b/pylib/tools/hookslib.py index 8920cdcfc..99f08fa1e 100644 --- a/pylib/tools/hookslib.py +++ b/pylib/tools/hookslib.py @@ -7,7 +7,6 @@ Code for generating hooks. from __future__ import annotations -import os import subprocess import sys from dataclasses import dataclass @@ -204,9 +203,6 @@ def write_file(path: str, hooks: list[Hook], prefix: str, suffix: str): code += f"\n{suffix}" - # work around issue with latest black - if sys.platform == "win32" and "HOME" in os.environ: - os.environ["USERPROFILE"] = os.environ["HOME"] with open(path, "wb") as file: file.write(code.encode("utf8")) - subprocess.run([sys.executable, "-m", "black", "-q", path], check=True) + subprocess.run([sys.executable, "-m", "ruff", "format", "-q", path], check=True) diff --git a/pyproject.toml b/pyproject.toml index f5443e229..7de32ec73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,16 +7,23 @@ classifiers = ["Private :: Do Not Upload"] [dependency-groups] dev = [ - "black", - "isort", "mypy", "mypy-protobuf", - "pylint", + "ruff", "pytest", "PyChromeDevTools", - "colorama", # for isort --color "wheel", "hatchling", # for type checking hatch_build.py files + "mock", + "types-protobuf", + "types-requests", + "types-orjson", + "types-decorator", + "types-flask", + "types-flask-cors", + "types-markdown", + "types-waitress", + "types-pywin32", ] [project.optional-dependencies] diff --git a/python/sphinx/build.py b/python/sphinx/build.py index 7d979c510..61091e6e1 100644 --- a/python/sphinx/build.py +++ b/python/sphinx/build.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import os import subprocess + os.environ["REPO_ROOT"] = os.path.abspath(".") subprocess.run(["out/pyenv/bin/sphinx-apidoc", "-o", "out/python/sphinx", "pylib", "qt"], check=True) subprocess.run(["out/pyenv/bin/sphinx-build", "out/python/sphinx", 
"out/python/sphinx/html"], check=True) diff --git a/qt/.isort.cfg b/qt/.isort.cfg deleted file mode 100644 index aa01f87c7..000000000 --- a/qt/.isort.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[settings] -py_version=39 -profile=black -known_first_party=anki,aqt -extend_skip=aqt/forms,hooks_gen.py diff --git a/qt/aqt/__init__.py b/qt/aqt/__init__.py index 740dcbc9f..53bdc3c92 100644 --- a/qt/aqt/__init__.py +++ b/qt/aqt/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations +# ruff: noqa: F401 import atexit import logging import os @@ -28,7 +29,7 @@ if sys.version_info[0] < 3 or sys.version_info[1] < 9: # ensure unicode filenames are supported try: "テスト".encode(sys.getfilesystemencoding()) -except UnicodeEncodeError as exc: +except UnicodeEncodeError: print("Anki requires a UTF-8 locale.") print("Please Google 'how to change locale on [your Linux distro]'") sys.exit(1) @@ -41,6 +42,11 @@ if "--syncserver" in sys.argv: # does not return run_sync_server() +if sys.platform == "win32": + from win32com.shell import shell + + shell.SetCurrentProcessExplicitAppUserModelID("Ankitects.Anki") + import argparse import builtins import cProfile @@ -285,7 +291,6 @@ class NativeEventFilter(QAbstractNativeEventFilter): def nativeEventFilter( self, eventType: Any, message: Any ) -> tuple[bool, Any | None]: - if eventType == "windows_generic_MSG": import ctypes.wintypes @@ -558,7 +563,7 @@ def run() -> None: print(f"Starting Anki {_version}...") try: _run() - except Exception as e: + except Exception: traceback.print_exc() QMessageBox.critical( None, diff --git a/qt/aqt/_macos_helper.py b/qt/aqt/_macos_helper.py index 634d94756..27b368e80 100644 --- a/qt/aqt/_macos_helper.py +++ b/qt/aqt/_macos_helper.py @@ -6,8 +6,6 @@ from __future__ import annotations import sys if sys.platform == "darwin": - from anki_mac_helper import ( # pylint:disable=unused-import,import-error - macos_helper, - ) + from anki_mac_helper import macos_helper else: macos_helper = None diff --git a/qt/aqt/about.py 
b/qt/aqt/about.py index fb90a9355..228d3cfeb 100644 --- a/qt/aqt/about.py +++ b/qt/aqt/about.py @@ -66,7 +66,8 @@ def show(mw: aqt.AnkiQt) -> QDialog: # WebView contents ###################################################################### abouttext = "
" - abouttext += f"

{tr.about_anki_is_a_friendly_intelligent_spaced()}" + lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®") + abouttext += f"

{lede}" abouttext += f"

{tr.about_anki_is_licensed_under_the_agpl3()}" abouttext += f"

{tr.about_version(val=version_with_build())}
" abouttext += ("Python %s Qt %s PyQt %s
") % ( @@ -223,6 +224,7 @@ def show(mw: aqt.AnkiQt) -> QDialog: "Mukunda Madhav Dey", "Adnane Taghi", "Anon_0000", + "Bilolbek Normuminov", ) ) diff --git a/qt/aqt/addons.py b/qt/aqt/addons.py index fdce9142a..a940fb208 100644 --- a/qt/aqt/addons.py +++ b/qt/aqt/addons.py @@ -927,7 +927,6 @@ class AddonsDialog(QDialog): or self.mgr.configAction(addon.dir_name) ) ) - return def _onAddonItemSelected(self, row_int: int) -> None: try: @@ -1457,7 +1456,9 @@ class ChooseAddonsToUpdateDialog(QDialog): layout.addWidget(addons_list_widget) self.addons_list_widget = addons_list_widget - button_box = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel) # type: ignore + button_box = QDialogButtonBox( + QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel + ) # type: ignore qconnect( button_box.button(QDialogButtonBox.StandardButton.Ok).clicked, self.accept ) diff --git a/qt/aqt/ankihub.py b/qt/aqt/ankihub.py index 4d3b00c8a..0ea9b6dac 100644 --- a/qt/aqt/ankihub.py +++ b/qt/aqt/ankihub.py @@ -36,7 +36,6 @@ def ankihub_login( username: str = "", password: str = "", ) -> None: - def on_future_done(fut: Future[str], username: str, password: str) -> None: try: token = fut.result() @@ -73,7 +72,6 @@ def ankihub_logout( on_success: Callable[[], None], token: str, ) -> None: - def logout() -> None: mw.pm.set_ankihub_username(None) mw.pm.set_ankihub_token(None) diff --git a/qt/aqt/browser/__init__.py b/qt/aqt/browser/__init__.py index ffff667e7..130167124 100644 --- a/qt/aqt/browser/__init__.py +++ b/qt/aqt/browser/__init__.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html from __future__ import annotations +# ruff: noqa: F401 import sys import aqt diff --git a/qt/aqt/browser/sidebar/__init__.py b/qt/aqt/browser/sidebar/__init__.py index 99ca8f7c4..555ed3cdd 100644 --- a/qt/aqt/browser/sidebar/__init__.py +++ b/qt/aqt/browser/sidebar/__init__.py @@ -1,5 +1,6 @@ # Copyright: 
Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +# ruff: noqa: F401 from anki.utils import is_mac from aqt.theme import theme_manager diff --git a/qt/aqt/browser/sidebar/tree.py b/qt/aqt/browser/sidebar/tree.py index e28f166e9..22d7fa4a6 100644 --- a/qt/aqt/browser/sidebar/tree.py +++ b/qt/aqt/browser/sidebar/tree.py @@ -106,7 +106,7 @@ class SidebarTreeView(QTreeView): def _setup_style(self) -> None: # match window background color and tweak style bgcolor = QPalette().window().color().name() - border = theme_manager.var(colors.BORDER) + theme_manager.var(colors.BORDER) styles = [ "padding: 3px", "padding-right: 0px", @@ -711,7 +711,6 @@ class SidebarTreeView(QTreeView): def _flags_tree(self, root: SidebarItem) -> None: icon_off = "icons:flag-variant-off-outline.svg" - icon = "icons:flag-variant.svg" icon_outline = "icons:flag-variant-outline.svg" root = self._section_root( diff --git a/qt/aqt/browser/table/__init__.py b/qt/aqt/browser/table/__init__.py index bd666cf1a..c942dc30f 100644 --- a/qt/aqt/browser/table/__init__.py +++ b/qt/aqt/browser/table/__init__.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html from __future__ import annotations +# ruff: noqa: F401 import copy import time from collections.abc import Generator, Sequence diff --git a/qt/aqt/browser/table/model.py b/qt/aqt/browser/table/model.py index e8d3bb7b6..732e8c99c 100644 --- a/qt/aqt/browser/table/model.py +++ b/qt/aqt/browser/table/model.py @@ -105,11 +105,11 @@ class DataModel(QAbstractTableModel): row = CellRow(*self.col.browser_row_for_id(item)) except BackendError as e: return CellRow.disabled(self.len_columns(), str(e)) - except Exception as e: + except Exception: return CellRow.disabled( self.len_columns(), tr.errors_please_check_database() ) - except BaseException as e: + except BaseException: # fatal error like a panic in the backend - dump it to the # console so it gets 
picked up by the error handler import traceback diff --git a/qt/aqt/browser/table/state.py b/qt/aqt/browser/table/state.py index 8054d2597..4faf88611 100644 --- a/qt/aqt/browser/table/state.py +++ b/qt/aqt/browser/table/state.py @@ -59,7 +59,7 @@ class ItemState(ABC): # abstractproperty is deprecated but used due to mypy limitations # (https://github.com/python/mypy/issues/1362) - @abstractproperty # pylint: disable=deprecated-decorator + @abstractproperty def active_columns(self) -> list[str]: """Return the saved or default columns for the state.""" diff --git a/qt/aqt/browser/table/table.py b/qt/aqt/browser/table/table.py index fb921822b..e28075b3f 100644 --- a/qt/aqt/browser/table/table.py +++ b/qt/aqt/browser/table/table.py @@ -361,8 +361,7 @@ class Table: for m in self.col.models.all(): for t in m["tmpls"]: bsize = t.get("bsize", 0) - if bsize > curmax: - curmax = bsize + curmax = max(curmax, bsize) assert self._view is not None vh = self._view.verticalHeader() diff --git a/qt/aqt/clayout.py b/qt/aqt/clayout.py index 388ae46c0..aec5326f4 100644 --- a/qt/aqt/clayout.py +++ b/qt/aqt/clayout.py @@ -221,7 +221,7 @@ class CardLayout(QDialog): ) for i in range(min(len(self.cloze_numbers), 9)): QShortcut( # type: ignore - QKeySequence(f"Alt+{i+1}"), + QKeySequence(f"Alt+{i + 1}"), self, activated=lambda n=i: self.pform.cloze_number_combo.setCurrentIndex(n), ) @@ -790,7 +790,7 @@ class CardLayout(QDialog): assert a is not None qconnect( a.triggered, - lambda: self.on_restore_to_default(), # pylint: disable=unnecessary-lambda + lambda: self.on_restore_to_default(), ) if not self._isCloze(): diff --git a/qt/aqt/debug_console.py b/qt/aqt/debug_console.py index a37d14010..54fa8a17a 100644 --- a/qt/aqt/debug_console.py +++ b/qt/aqt/debug_console.py @@ -294,7 +294,6 @@ class DebugConsole(QDialog): } self._captureOutput(True) try: - # pylint: disable=exec-used exec(text, vars) except Exception: self._output += traceback.format_exc() diff --git a/qt/aqt/deckbrowser.py 
b/qt/aqt/deckbrowser.py index 77bd84220..5dc688155 100644 --- a/qt/aqt/deckbrowser.py +++ b/qt/aqt/deckbrowser.py @@ -386,9 +386,7 @@ class DeckBrowser: if b[0]: b[0] = tr.actions_shortcut_key(val=shortcut(b[0])) buf += """ -""" % tuple( - b - ) +""" % tuple(b) self.bottom.draw( buf=buf, link_handler=self._linkHandler, diff --git a/qt/aqt/editor.py b/qt/aqt/editor.py index 3a980145d..f2f267097 100644 --- a/qt/aqt/editor.py +++ b/qt/aqt/editor.py @@ -36,7 +36,7 @@ from anki.hooks import runFilter from anki.httpclient import HttpClient from anki.models import NotetypeDict, NotetypeId, StockNotetype from anki.notes import Note, NoteFieldsCheckResult, NoteId -from anki.utils import checksum, is_lin, is_mac, is_win, namedtmp +from anki.utils import checksum, is_lin, is_win, namedtmp from aqt import AnkiQt, colors, gui_hooks from aqt.operations import QueryOp from aqt.operations.note import update_note @@ -343,7 +343,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too gui_hooks.editor_did_init_shortcuts(cuts, self) for row in cuts: if len(row) == 2: - keys, fn = row # pylint: disable=unbalanced-tuple-unpacking + keys, fn = row fn = self._addFocusCheck(fn) else: keys, fn, _ = row @@ -796,7 +796,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too def accept(file: str) -> None: self.resolve_media(file) - file = getFile( + getFile( parent=self.widget, title=tr.editing_add_media(), cb=cast(Callable[[Any], None], accept), @@ -999,7 +999,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too if html.find(">") < 0: return html - with warnings.catch_warnings() as w: + with warnings.catch_warnings(): warnings.simplefilter("ignore", UserWarning) doc = BeautifulSoup(html, "html.parser") @@ -1029,15 +1029,14 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too m = re.match(r"http://127.0.0.1:\d+/(.*)$", str(src)) if m: tag["src"] = m.group(1) - else: - # 
in external pastes, download remote media - if isinstance(src, str) and self.isURL(src): - fname = self._retrieveURL(src) - if fname: - tag["src"] = fname - elif isinstance(src, str) and src.startswith("data:image/"): - # and convert inlined data - tag["src"] = self.inlinedImageToFilename(str(src)) + # in external pastes, download remote media + elif isinstance(src, str) and self.isURL(src): + fname = self._retrieveURL(src) + if fname: + tag["src"] = fname + elif isinstance(src, str) and src.startswith("data:image/"): + # and convert inlined data + tag["src"] = self.inlinedImageToFilename(str(src)) html = str(doc) return html @@ -1102,7 +1101,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too ) filter = f"{tr.editing_media()} ({extension_filter})" - file = getFile( + getFile( parent=self.widget, title=tr.editing_add_media(), cb=cast(Callable[[Any], None], self.setup_mask_editor), @@ -1735,10 +1734,9 @@ class EditorWebView(AnkiWebView): assert a is not None qconnect(a.triggered, lambda: openFolder(path)) - if is_win or is_mac: - a = menu.addAction(tr.editing_show_in_folder()) - assert a is not None - qconnect(a.triggered, lambda: show_in_folder(path)) + a = menu.addAction(tr.editing_show_in_folder()) + assert a is not None + qconnect(a.triggered, lambda: show_in_folder(path)) def _clipboard(self) -> QClipboard: clipboard = self.editor.mw.app.clipboard() diff --git a/qt/aqt/errors.py b/qt/aqt/errors.py index af1036acd..a6d9251e2 100644 --- a/qt/aqt/errors.py +++ b/qt/aqt/errors.py @@ -23,25 +23,36 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr if TYPE_CHECKING: from aqt.main import AnkiQt +# so we can be non-modal/non-blocking, without Python deallocating the message +# box ahead of time +_mbox: QMessageBox | None = None + def show_exception(*, parent: QWidget, exception: Exception) -> None: "Present a caught exception to the user using a pop-up." 
if isinstance(exception, Interrupted): # nothing to do return + global _mbox + error_lines = [] + help_page = HelpPage.TROUBLESHOOTING if isinstance(exception, BackendError): if exception.context: - print(exception.context) + error_lines.append(exception.context) if exception.backtrace: - print(exception.backtrace) - showWarning(str(exception), parent=parent, help=exception.help_page) + error_lines.append(exception.backtrace) + if exception.help_page is not None: + help_page = exception.help_page else: # if the error is not originating from the backend, dump # a traceback to the console to aid in debugging - traceback.print_exception( - None, exception, exception.__traceback__, file=sys.stdout + error_lines = traceback.format_exception( + None, exception, exception.__traceback__ ) - showWarning(str(exception), parent=parent) + error_text = "\n".join(error_lines) + print(error_lines) + _mbox = _init_message_box(str(exception), error_text, help_page) + _mbox.show() def is_chromium_cert_error(error: str) -> bool: @@ -158,9 +169,39 @@ if not os.environ.get("DEBUG"): sys.excepthook = excepthook -# so we can be non-modal/non-blocking, without Python deallocating the message -# box ahead of time -_mbox: QMessageBox | None = None + +def _init_message_box( + user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING +): + global _mbox + + _mbox = QMessageBox() + _mbox.setWindowTitle("Anki") + _mbox.setText(user_text) + _mbox.setIcon(QMessageBox.Icon.Warning) + _mbox.setTextFormat(Qt.TextFormat.PlainText) + + def show_help(): + openHelp(help_page) + + def copy_debug_info(): + QApplication.clipboard().setText(debug_text) + tooltip(tr.errors_copied_to_clipboard(), parent=_mbox) + + help = _mbox.addButton(QMessageBox.StandardButton.Help) + if debug_text: + debug_info = _mbox.addButton( + tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole + ) + debug_info.disconnect() + debug_info.clicked.connect(copy_debug_info) + cancel = 
_mbox.addButton(QMessageBox.StandardButton.Cancel) + cancel.setText(tr.actions_close()) + + help.disconnect() + help.clicked.connect(show_help) + + return _mbox class ErrorHandler(QObject): @@ -252,33 +293,7 @@ class ErrorHandler(QObject): user_text += "\n\n" + self._addonText(error) debug_text += addon_debug_info() - def show_troubleshooting(): - openHelp(HelpPage.TROUBLESHOOTING) - - def copy_debug_info(): - QApplication.clipboard().setText(debug_text) - tooltip(tr.errors_copied_to_clipboard(), parent=_mbox) - - global _mbox - _mbox = QMessageBox() - _mbox.setWindowTitle("Anki") - _mbox.setText(user_text) - _mbox.setIcon(QMessageBox.Icon.Warning) - _mbox.setTextFormat(Qt.TextFormat.PlainText) - - troubleshooting = _mbox.addButton( - tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole - ) - debug_info = _mbox.addButton( - tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole - ) - cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel) - cancel.setText(tr.actions_close()) - - troubleshooting.disconnect() - troubleshooting.clicked.connect(show_troubleshooting) - debug_info.disconnect() - debug_info.clicked.connect(copy_debug_info) + _mbox = _init_message_box(user_text, debug_text) if self.fatal_error_encountered: _mbox.exec() diff --git a/qt/aqt/exporting.py b/qt/aqt/exporting.py index 4ff024917..cadbaef0c 100644 --- a/qt/aqt/exporting.py +++ b/qt/aqt/exporting.py @@ -212,11 +212,10 @@ class ExportDialog(QDialog): if self.isVerbatim: msg = tr.exporting_collection_exported() self.mw.reopen() + elif self.isTextNote: + msg = tr.exporting_note_exported(count=self.exporter.count) else: - if self.isTextNote: - msg = tr.exporting_note_exported(count=self.exporter.count) - else: - msg = tr.exporting_card_exported(count=self.exporter.count) + msg = tr.exporting_card_exported(count=self.exporter.count) gui_hooks.legacy_exporter_did_export(self.exporter) tooltip(msg, period=3000) QDialog.reject(self) diff --git a/qt/aqt/forms/__init__.py 
b/qt/aqt/forms/__init__.py index 9484f91ec..7cbfe3a6f 100644 --- a/qt/aqt/forms/__init__.py +++ b/qt/aqt/forms/__init__.py @@ -1,3 +1,4 @@ +# ruff: noqa: F401 from . import ( about, addcards, diff --git a/qt/aqt/forms/main.ui b/qt/aqt/forms/main.ui index 596ea985c..bffc67ad0 100644 --- a/qt/aqt/forms/main.ui +++ b/qt/aqt/forms/main.ui @@ -46,7 +46,7 @@ 0 0 667 - 24 + 43 @@ -93,6 +93,7 @@ + @@ -130,7 +131,7 @@ Ctrl+P - QAction::PreferencesRole + QAction::MenuRole::PreferencesRole @@ -138,7 +139,7 @@ qt_accel_about - QAction::AboutRole + QAction::MenuRole::ApplicationSpecificRole @@ -283,6 +284,11 @@ qt_accel_load_backup + + + qt_accel_upgrade_downgrade + + diff --git a/qt/aqt/import_export/importing.py b/qt/aqt/import_export/importing.py index 938824035..cb27c5e4b 100644 --- a/qt/aqt/import_export/importing.py +++ b/qt/aqt/import_export/importing.py @@ -134,9 +134,8 @@ IMPORTERS: list[type[Importer]] = [ def legacy_file_endings(col: Collection) -> list[str]: - from anki.importing import AnkiPackageImporter + from anki.importing import AnkiPackageImporter, TextImporter, importers from anki.importing import MnemosyneImporter as LegacyMnemosyneImporter - from anki.importing import TextImporter, importers return [ ext diff --git a/qt/aqt/importing.py b/qt/aqt/importing.py index 8f9741a77..8701f9843 100644 --- a/qt/aqt/importing.py +++ b/qt/aqt/importing.py @@ -11,10 +11,10 @@ from collections.abc import Callable from concurrent.futures import Future from typing import Any -import anki.importing as importing import aqt.deckchooser import aqt.forms import aqt.modelchooser +from anki import importing from anki.importing.anki2 import MediaMapInvalid, V2ImportIntoV1 from anki.importing.apkg import AnkiPackageImporter from aqt.import_export.importing import ColpkgImporter @@ -262,7 +262,7 @@ class ImportDialog(QDialog): self.mapwidget.setLayout(self.grid) self.grid.setContentsMargins(3, 3, 3, 3) self.grid.setSpacing(6) - for num in range(len(self.mapping)): # pylint: 
disable=consider-using-enumerate + for num in range(len(self.mapping)): text = tr.importing_field_of_file_is(val=num + 1) self.grid.addWidget(QLabel(text), num, 0) if self.mapping[num] == "_tags": @@ -357,7 +357,7 @@ def importFile(mw: AnkiQt, file: str) -> None: try: importer.open() mw.progress.finish() - diag = ImportDialog(mw, importer) + ImportDialog(mw, importer) except UnicodeDecodeError: mw.progress.finish() showUnicodeWarning() @@ -443,3 +443,4 @@ def setupApkgImport(mw: AnkiQt, importer: AnkiPackageImporter) -> bool: return True ColpkgImporter.do_import(mw, importer.file) return False + return False diff --git a/qt/aqt/main.py b/qt/aqt/main.py index bc28e287b..c707d1b2a 100644 --- a/qt/aqt/main.py +++ b/qt/aqt/main.py @@ -376,7 +376,6 @@ class AnkiQt(QMainWindow): def openProfile(self) -> None: name = self.pm.profiles()[self.profileForm.profiles.currentRow()] self.pm.load(name) - return def onOpenProfile(self, *, callback: Callable[[], None] | None = None) -> None: def on_done() -> None: @@ -451,7 +450,6 @@ class AnkiQt(QMainWindow): self.loadProfile() def onOpenBackup(self) -> None: - def do_open(path: str) -> None: if not askUser( tr.qt_misc_replace_your_collection_with_an_earlier2( @@ -677,7 +675,7 @@ class AnkiQt(QMainWindow): gui_hooks.collection_did_load(self.col) self.apply_collection_options() self.moveToState("deckBrowser") - except Exception as e: + except Exception: # dump error to stderr so it gets picked up by errors.py traceback.print_exc() @@ -774,7 +772,6 @@ class AnkiQt(QMainWindow): oldState = self.state cleanup = getattr(self, f"_{oldState}Cleanup", None) if cleanup: - # pylint: disable=not-callable cleanup(state) self.clearStateShortcuts() self.state = state @@ -821,7 +818,7 @@ class AnkiQt(QMainWindow): self.bottomWeb.hide_timer.start() def _reviewCleanup(self, newState: MainWindowState) -> None: - if newState != "resetRequired" and newState != "review": + if newState not in {"resetRequired", "review"}: 
self.reviewer.auto_advance_enabled = False self.reviewer.cleanup() self.toolbarWeb.elevate() @@ -1308,6 +1305,14 @@ title="{}" {}>{}""".format( def onPrefs(self) -> None: aqt.dialogs.open("Preferences", self) + def on_upgrade_downgrade(self) -> None: + if not askUser(tr.qt_misc_open_anki_launcher()): + return + + from aqt.package import update_and_restart + + update_and_restart() + def onNoteTypes(self) -> None: import aqt.models @@ -1389,6 +1394,8 @@ title="{}" {}>{}""".format( ########################################################################## def setupMenus(self) -> None: + from aqt.package import launcher_executable + m = self.form # File @@ -1405,6 +1412,7 @@ title="{}" {}>{}""".format( qconnect(m.actionDocumentation.triggered, self.onDocumentation) qconnect(m.actionDonate.triggered, self.onDonate) qconnect(m.actionAbout.triggered, self.onAbout) + m.actionAbout.setText(tr.qt_accel_about_mac()) # Edit qconnect(m.actionUndo.triggered, self.undo) @@ -1417,6 +1425,9 @@ title="{}" {}>{}""".format( qconnect(m.actionCreateFiltered.triggered, self.onCram) qconnect(m.actionEmptyCards.triggered, self.onEmptyCards) qconnect(m.actionNoteTypes.triggered, self.onNoteTypes) + qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade) + if not launcher_executable(): + m.action_upgrade_downgrade.setVisible(False) qconnect(m.actionPreferences.triggered, self.onPrefs) # View @@ -1708,11 +1719,37 @@ title="{}" {}>{}""".format( self.maybeHideAccelerators() self.hideStatusTips() elif is_win: - # make sure ctypes is bundled - from ctypes import windll, wintypes # type: ignore + self._setupWin32() - _dummy1 = windll - _dummy2 = wintypes + def _setupWin32(self): + """Fix taskbar display/pinning""" + if sys.platform != "win32": + return + + launcher_path = os.environ.get("ANKI_LAUNCHER") + if not launcher_path: + return + + from win32com.propsys import propsys, pscon + from win32com.propsys.propsys import PROPVARIANTType + + hwnd = int(self.winId()) + prop_store = 
propsys.SHGetPropertyStoreForWindow(hwnd) # type: ignore[call-arg] + prop_store.SetValue( + pscon.PKEY_AppUserModel_ID, PROPVARIANTType("Ankitects.Anki") + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchCommand, + PROPVARIANTType(f'"{launcher_path}"'), + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchDisplayNameResource, PROPVARIANTType("Anki") + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchIconResource, + PROPVARIANTType(f"{launcher_path},0"), + ) + prop_store.Commit() def maybeHideAccelerators(self, tgt: Any | None = None) -> None: if not self.hideMenuAccels: diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index 69ef054ec..f08be4cef 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -230,7 +230,11 @@ def _handle_local_file_request(request: LocalFileRequest) -> Response: else: max_age = 60 * 60 return flask.send_file( - fullpath, mimetype=mimetype, conditional=True, max_age=max_age, download_name="foo" # type: ignore[call-arg] + fullpath, + mimetype=mimetype, + conditional=True, + max_age=max_age, + download_name="foo", # type: ignore[call-arg] ) else: print(f"Not found: {path}") @@ -647,7 +651,7 @@ exposed_backend_list = [ "compute_fsrs_params", "compute_optimal_retention", "set_wants_abort", - "evaluate_params", + "evaluate_params_legacy", "get_optimal_retention_parameters", "simulate_fsrs_review", # DeckConfigService diff --git a/qt/aqt/mpv.py b/qt/aqt/mpv.py index e86675e41..2586d024a 100644 --- a/qt/aqt/mpv.py +++ b/qt/aqt/mpv.py @@ -24,7 +24,7 @@ # # ------------------------------------------------------------------------------ -# pylint: disable=raise-missing-from + from __future__ import annotations import inspect @@ -66,7 +66,6 @@ class MPVTimeoutError(MPVError): if is_win: - # pylint: disable=import-error import pywintypes import win32file # pytype: disable=import-error import win32job @@ -138,15 +137,15 @@ class MPVBase: extended_info = win32job.QueryInformationJobObject( self._job, 
win32job.JobObjectExtendedLimitInformation ) - extended_info["BasicLimitInformation"][ - "LimitFlags" - ] = win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + extended_info["BasicLimitInformation"]["LimitFlags"] = ( + win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + ) win32job.SetInformationJobObject( self._job, win32job.JobObjectExtendedLimitInformation, extended_info, ) - handle = self._proc._handle # pylint: disable=no-member + handle = self._proc._handle win32job.AssignProcessToJobObject(self._job, handle) def _stop_process(self): @@ -193,7 +192,10 @@ class MPVBase: None, ) win32pipe.SetNamedPipeHandleState( - self._sock, 1, None, None # PIPE_NOWAIT + self._sock, + 1, + None, + None, # PIPE_NOWAIT ) except pywintypes.error as err: if err.args[0] == winerror.ERROR_FILE_NOT_FOUND: @@ -394,7 +396,7 @@ class MPVBase: return self._get_response(timeout) except MPVCommandError as e: raise MPVCommandError(f"{message['command']!r}: {e}") - except Exception as e: + except Exception: if _retry: print("mpv timed out, restarting") self._stop_process() @@ -501,7 +503,6 @@ class MPV(MPVBase): # Simulate an init event when the process and all callbacks have been # completely set up. 
if hasattr(self, "on_init"): - # pylint: disable=no-member self.on_init() # diff --git a/qt/aqt/overview.py b/qt/aqt/overview.py index 184a51cf5..b1fc9a119 100644 --- a/qt/aqt/overview.py +++ b/qt/aqt/overview.py @@ -113,7 +113,7 @@ class Overview: self.mw.moveToState("deckBrowser") elif url == "review": openLink(f"{aqt.appShared}info/{self.sid}?v={self.sidVer}") - elif url == "studymore" or url == "customStudy": + elif url in {"studymore", "customStudy"}: self.onStudyMore() elif url == "unbury": self.on_unbury() @@ -180,7 +180,6 @@ class Overview: ############################################################ def _renderPage(self) -> None: - but = self.mw.button deck = self.mw.col.decks.current() self.sid = deck.get("sharedFrom") if self.sid: @@ -307,9 +306,7 @@ class Overview: if b[0]: b[0] = tr.actions_shortcut_key(val=shortcut(b[0])) buf += """ -""" % tuple( - b - ) +""" % tuple(b) self.bottom.draw( buf=buf, link_handler=link_handler, diff --git a/qt/aqt/package.py b/qt/aqt/package.py index f1834b594..f85a17335 100644 --- a/qt/aqt/package.py +++ b/qt/aqt/package.py @@ -5,13 +5,16 @@ from __future__ import annotations +import contextlib +import os import subprocess +import sys from pathlib import Path -from anki.utils import is_mac +from anki.utils import is_mac, is_win -# pylint: disable=unused-import,import-error +# ruff: noqa: F401 def first_run_setup() -> None: """Code run the first time after install/upgrade. 
@@ -65,3 +68,101 @@ def first_run_setup() -> None: # Wait for both commands to complete for proc in processes: proc.wait() + + +def uv_binary() -> str | None: + """Return the path to the uv binary.""" + return os.environ.get("ANKI_LAUNCHER_UV") + + +def launcher_root() -> str | None: + """Return the path to the launcher root directory (AnkiProgramFiles).""" + return os.environ.get("UV_PROJECT") + + +def venv_binary(cmd: str) -> str | None: + """Return the path to a binary in the launcher's venv.""" + root = launcher_root() + if not root: + return None + + root_path = Path(root) + if is_win: + binary_path = root_path / ".venv" / "Scripts" / cmd + else: + binary_path = root_path / ".venv" / "bin" / cmd + + return str(binary_path) + + +def add_python_requirements(reqs: list[str]) -> tuple[bool, str]: + """Add Python requirements to the launcher venv using uv add. + + Returns (success, output)""" + + binary = uv_binary() + if not binary: + return (False, "Not in packaged build.") + + uv_cmd = [binary, "add"] + reqs + result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False) + + if result.returncode == 0: + root = launcher_root() + if root: + sync_marker = Path(root) / ".sync_complete" + sync_marker.touch() + + return (True, result.stdout) + else: + return (False, result.stderr) + + +def launcher_executable() -> str | None: + """Return the path to the Anki launcher executable.""" + return os.getenv("ANKI_LAUNCHER") + + +def trigger_launcher_run() -> None: + """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" + try: + root = launcher_root() + if not root: + return + + pyproject_path = Path(root) / "pyproject.toml" + + if pyproject_path.exists(): + # Touch the file to update its mtime + pyproject_path.touch() + except Exception as e: + print(e) + + +def update_and_restart() -> None: + """Update and restart Anki using the launcher.""" + from aqt import mw + + launcher = launcher_executable() + assert 
launcher + + trigger_launcher_run() + + with contextlib.suppress(ResourceWarning): + env = os.environ.copy() + creationflags = 0 + if sys.platform == "win32": + creationflags = ( + subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS + ) + subprocess.Popen( + [launcher], + start_new_session=True, + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + creationflags=creationflags, + ) + + mw.app.quit() diff --git a/qt/aqt/profiles.py b/qt/aqt/profiles.py index 6597f6705..919be170c 100644 --- a/qt/aqt/profiles.py +++ b/qt/aqt/profiles.py @@ -128,7 +128,7 @@ class ProfileManager: default_answer_keys = {ease_num: str(ease_num) for ease_num in range(1, 5)} last_run_version: int = 0 - def __init__(self, base: Path) -> None: # + def __init__(self, base: Path) -> None: "base should be retrieved via ProfileMangager.get_created_base_folder" ## Settings which should be forgotten each Anki restart self.session: dict[str, Any] = {} @@ -153,7 +153,7 @@ class ProfileManager: else: try: self.load(profile) - except Exception as exc: + except Exception: self.invalid_profile_provided_on_commandline = True # Profile load/save @@ -483,7 +483,11 @@ create table if not exists profiles code = obj[1] name = obj[0] r = QMessageBox.question( - None, "Anki", tr.profiles_confirm_lang_choice(lang=name), QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, QMessageBox.StandardButton.No # type: ignore + None, + "Anki", + tr.profiles_confirm_lang_choice(lang=name), + QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, + QMessageBox.StandardButton.No, # type: ignore ) if r != QMessageBox.StandardButton.Yes: return self.setDefaultLang(f.lang.currentRow()) diff --git a/qt/aqt/progress.py b/qt/aqt/progress.py index 8c45c44ee..cc7e750de 100644 --- a/qt/aqt/progress.py +++ b/qt/aqt/progress.py @@ -119,13 +119,12 @@ class ProgressManager: if not self._levels: # no current progress; safe to fire func() + elif repeat: + # skip 
this time; we'll fire again + pass else: - if repeat: - # skip this time; we'll fire again - pass - else: - # retry in 100ms - self.single_shot(100, func, requires_collection) + # retry in 100ms + self.single_shot(100, func, requires_collection) return handler diff --git a/qt/aqt/qt/__init__.py b/qt/aqt/qt/__init__.py index 11670e90c..730bc771b 100644 --- a/qt/aqt/qt/__init__.py +++ b/qt/aqt/qt/__init__.py @@ -2,7 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html # make sure not to optimize imports on this file -# pylint: disable=unused-import +# ruff: noqa: F401 from __future__ import annotations import os @@ -23,7 +23,7 @@ def debug() -> None: from pdb import set_trace pyqtRemoveInputHook() - set_trace() # pylint: disable=forgotten-debug-statement + set_trace() if os.environ.get("DEBUG"): diff --git a/qt/aqt/qt/qt6.py b/qt/aqt/qt/qt6.py index 2d387aabf..dabed757b 100644 --- a/qt/aqt/qt/qt6.py +++ b/qt/aqt/qt/qt6.py @@ -2,8 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html # make sure not to optimize imports on this file -# pylint: disable=unused-import - +# ruff: noqa: F401 """ PyQt6 imports """ diff --git a/qt/aqt/reviewer.py b/qt/aqt/reviewer.py index 6e34a7931..a8839c598 100644 --- a/qt/aqt/reviewer.py +++ b/qt/aqt/reviewer.py @@ -21,13 +21,11 @@ from anki.scheduler.base import ScheduleCardsAsNew from anki.scheduler.v3 import ( CardAnswer, QueuedCards, -) -from anki.scheduler.v3 import Scheduler as V3Scheduler -from anki.scheduler.v3 import ( SchedulingContext, SchedulingStates, SetSchedulingStatesRequest, ) +from anki.scheduler.v3 import Scheduler as V3Scheduler from anki.tags import MARKED_TAG from anki.types import assert_exhaustive from anki.utils import is_mac @@ -597,10 +595,9 @@ class Reviewer: def _shortcutKeys( self, ) -> Sequence[tuple[str, Callable] | tuple[Qt.Key, Callable]]: - - def generate_default_answer_keys() -> ( - Generator[tuple[str, partial], None, None] - ): + def 
generate_default_answer_keys() -> Generator[ + tuple[str, partial], None, None + ]: for ease in aqt.mw.pm.default_answer_keys: key = aqt.mw.pm.get_answer_key(ease) if not key: diff --git a/qt/aqt/sound.py b/qt/aqt/sound.py index 8ff49024f..d20365232 100644 --- a/qt/aqt/sound.py +++ b/qt/aqt/sound.py @@ -101,7 +101,7 @@ def is_audio_file(fname: str) -> bool: return ext in AUDIO_EXTENSIONS -class SoundOrVideoPlayer(Player): # pylint: disable=abstract-method +class SoundOrVideoPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -111,7 +111,7 @@ class SoundOrVideoPlayer(Player): # pylint: disable=abstract-method return None -class SoundPlayer(Player): # pylint: disable=abstract-method +class SoundPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -121,7 +121,7 @@ class SoundPlayer(Player): # pylint: disable=abstract-method return None -class VideoPlayer(Player): # pylint: disable=abstract-method +class VideoPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -324,7 +324,7 @@ def retryWait(proc: subprocess.Popen) -> int: ########################################################################## -class SimpleProcessPlayer(Player): # pylint: disable=abstract-method +class SimpleProcessPlayer(Player): "A player that invokes a new process for each tag to play." 
args: list[str] = [] diff --git a/qt/aqt/stylesheets.py b/qt/aqt/stylesheets.py index 0721e76d2..a262e18b9 100644 --- a/qt/aqt/stylesheets.py +++ b/qt/aqt/stylesheets.py @@ -208,7 +208,7 @@ class CustomStyles: button_pressed_gradient( tm.var(colors.BUTTON_GRADIENT_START), tm.var(colors.BUTTON_GRADIENT_END), - tm.var(colors.SHADOW) + tm.var(colors.SHADOW), ) }; }} @@ -340,7 +340,7 @@ class CustomStyles: }} QTabBar::tab:selected:hover {{ background: { - button_gradient( + button_gradient( tm.var(colors.BUTTON_PRIMARY_GRADIENT_START), tm.var(colors.BUTTON_PRIMARY_GRADIENT_END), ) @@ -391,7 +391,7 @@ class CustomStyles: button_pressed_gradient( tm.var(colors.BUTTON_GRADIENT_START), tm.var(colors.BUTTON_GRADIENT_END), - tm.var(colors.SHADOW) + tm.var(colors.SHADOW), ) } }} @@ -647,10 +647,12 @@ class CustomStyles: margin: -7px 0; }} QSlider::handle:hover {{ - background: {button_gradient( - tm.var(colors.BUTTON_GRADIENT_START), - tm.var(colors.BUTTON_GRADIENT_END), - )} + background: { + button_gradient( + tm.var(colors.BUTTON_GRADIENT_START), + tm.var(colors.BUTTON_GRADIENT_END), + ) + } }} """ diff --git a/qt/aqt/sync.py b/qt/aqt/sync.py index 5a4d5fd4c..bedc05f8e 100644 --- a/qt/aqt/sync.py +++ b/qt/aqt/sync.py @@ -44,7 +44,7 @@ def get_sync_status( ) -> None: auth = mw.pm.sync_auth() if not auth: - callback(SyncStatus(required=SyncStatus.NO_CHANGES)) # pylint:disable=no-member + callback(SyncStatus(required=SyncStatus.NO_CHANGES)) return def on_future_done(fut: Future[SyncStatus]) -> None: @@ -302,7 +302,6 @@ def sync_login( username: str = "", password: str = "", ) -> None: - def on_future_done(fut: Future[SyncAuth], username: str, password: str) -> None: try: auth = fut.result() @@ -374,7 +373,9 @@ def get_id_and_pass_from_user( g.addWidget(passwd, 1, 1) l2.setBuddy(passwd) vbox.addLayout(g) - bb = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel) # type: ignore + bb = QDialogButtonBox( + 
QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel + ) # type: ignore ok_button = bb.button(QDialogButtonBox.StandardButton.Ok) assert ok_button is not None ok_button.setAutoDefault(True) diff --git a/qt/aqt/theme.py b/qt/aqt/theme.py index e06cf71c2..675eb9345 100644 --- a/qt/aqt/theme.py +++ b/qt/aqt/theme.py @@ -187,7 +187,7 @@ class ThemeManager: self, card_ord: int, night_mode: bool | None = None ) -> str: "Returns body classes used when showing a card." - return f"card card{card_ord+1} {self.body_class(night_mode, reviewer=True)}" + return f"card card{card_ord + 1} {self.body_class(night_mode, reviewer=True)}" def var(self, vars: dict[str, str]) -> str: """Given day/night colors/props, return the correct one for the current theme.""" @@ -213,13 +213,12 @@ class ThemeManager: return False elif theme == Theme.DARK: return True + elif is_win: + return get_windows_dark_mode() + elif is_mac: + return get_macos_dark_mode() else: - if is_win: - return get_windows_dark_mode() - elif is_mac: - return get_macos_dark_mode() - else: - return get_linux_dark_mode() + return get_linux_dark_mode() def apply_style(self) -> None: "Apply currently configured style." 
@@ -340,7 +339,7 @@ def get_windows_dark_mode() -> bool: if not is_win: return False - from winreg import ( # type: ignore[attr-defined] # pylint: disable=import-error + from winreg import ( # type: ignore[attr-defined] HKEY_CURRENT_USER, OpenKey, QueryValueEx, @@ -352,7 +351,7 @@ def get_windows_dark_mode() -> bool: r"Software\Microsoft\Windows\CurrentVersion\Themes\Personalize", ) return not QueryValueEx(key, "AppsUseLightTheme")[0] - except Exception as err: + except Exception: # key reportedly missing or set to wrong type on some systems return False @@ -416,12 +415,12 @@ def get_linux_dark_mode() -> bool: capture_output=True, encoding="utf8", ) - except FileNotFoundError as e: + except FileNotFoundError: # detection strategy failed, missing program # print(e) continue - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # detection strategy failed, command returned error # print(e) continue diff --git a/qt/aqt/toolbar.py b/qt/aqt/toolbar.py index 44f2ee66c..be547b5ba 100644 --- a/qt/aqt/toolbar.py +++ b/qt/aqt/toolbar.py @@ -87,6 +87,7 @@ class TopWebView(ToolbarWebView): else: self.flatten() + self.adjustHeightToFit() self.show() def _onHeight(self, qvar: int | None) -> None: diff --git a/qt/aqt/tts.py b/qt/aqt/tts.py index 079a5e3de..d559fb41f 100644 --- a/qt/aqt/tts.py +++ b/qt/aqt/tts.py @@ -166,7 +166,6 @@ class MacVoice(TTSVoice): original_name: str -# pylint: disable=no-member class MacTTSPlayer(TTSProcessPlayer): "Invokes a process to play the audio in the background." 
@@ -487,7 +486,7 @@ if is_win: class WindowsTTSPlayer(TTSProcessPlayer): default_rank = -1 try: - import win32com.client # pylint: disable=import-error + import win32com.client speaker = win32com.client.Dispatch("SAPI.SpVoice") except Exception as exc: diff --git a/qt/aqt/update.py b/qt/aqt/update.py index d8e92426c..e5794eead 100644 --- a/qt/aqt/update.py +++ b/qt/aqt/update.py @@ -1,16 +1,21 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -import os -from pathlib import Path +from __future__ import annotations import aqt from anki.buildinfo import buildhash from anki.collection import CheckForUpdateResponse, Collection -from anki.utils import dev_mode, int_time, int_version, is_mac, is_win, plat_desc +from anki.utils import dev_mode, int_time, int_version, plat_desc from aqt.operations import QueryOp +from aqt.package import ( + launcher_executable as _launcher_executable, +) +from aqt.package import ( + update_and_restart as _update_and_restart, +) from aqt.qt import * -from aqt.utils import show_info, show_warning, showText, tr +from aqt.utils import openLink, show_warning, showText, tr def check_for_update() -> None: @@ -80,33 +85,7 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None: # ignore this update mw.pm.meta["suppressUpdate"] = ver elif ret == QMessageBox.StandardButton.Yes: - update_and_restart() - - -def update_and_restart() -> None: - """Download and install the update, then restart Anki.""" - update_on_next_run() - # todo: do this automatically in the future - show_info(tr.qt_misc_please_restart_to_update_anki()) - - -def update_on_next_run() -> None: - """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" - try: - # Get the local data directory equivalent to Rust's dirs::data_local_dir() - if is_win: - data_dir = Path(os.environ.get("LOCALAPPDATA", "")) - elif is_mac: - data_dir = Path.home() / "Library" / 
"Application Support" - else: # Linux - data_dir = Path( - os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share") - ) - - pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml" - - if pyproject_path.exists(): - # Touch the file to update its mtime - pyproject_path.touch() - except Exception as e: - print(e) + if _launcher_executable(): + _update_and_restart() + else: + openLink(aqt.appWebsiteDownloadSection) diff --git a/qt/aqt/utils.py b/qt/aqt/utils.py index e17550fc0..64d057082 100644 --- a/qt/aqt/utils.py +++ b/qt/aqt/utils.py @@ -19,7 +19,7 @@ from send2trash import send2trash import aqt from anki._legacy import DeprecatedNamesMixinForModule from anki.collection import Collection, HelpPage -from anki.lang import TR, tr_legacyglobal # pylint: disable=unused-import +from anki.lang import TR, tr_legacyglobal # noqa: F401 from anki.utils import ( call, invalid_filename, @@ -31,7 +31,7 @@ from anki.utils import ( from aqt.qt import * from aqt.qt import ( PYQT_VERSION_STR, - QT_VERSION_STR, + QT_VERSION_STR, # noqa: F401 QAction, QApplication, QCheckBox, @@ -294,7 +294,7 @@ def showInfo( icon = QMessageBox.Icon.Critical else: icon = QMessageBox.Icon.Information - mb = QMessageBox(parent_widget) # + mb = QMessageBox(parent_widget) if textFormat == "plain": mb.setTextFormat(Qt.TextFormat.PlainText) elif textFormat == "rich": @@ -936,14 +936,39 @@ def show_in_folder(path: str) -> None: """ call(osascript_to_args(script)) else: - # Just open the file in any other platform - with no_bundled_libs(): - QDesktopServices.openUrl(QUrl.fromLocalFile(path)) + # For linux, there are multiple file managers. Let's test if one of the + # most common file managers is found and use it in case it is installed. + # If none of this list are installed, use a fallback. The fallback + # might open the image in a web browser, image viewer or others, + # depending on the users defaults. 
+ file_managers = [ + "nautilus", # GNOME + "dolphin", # KDE + "pcmanfm", # LXDE + "thunar", # XFCE + "nemo", # Cinnamon + "caja", # MATE + ] + + available_file_manager = None + + # Test if a file manager is installed and use it, fallback otherwise + for file_manager in file_managers: + if shutil.which(file_manager): + available_file_manager = file_manager + break + + if available_file_manager: + subprocess.run([available_file_manager, path], check=False) + else: + # Just open the file in any other platform + with no_bundled_libs(): + QDesktopServices.openUrl(QUrl.fromLocalFile(path)) def _show_in_folder_win32(path: str) -> None: - import win32con # pylint: disable=import-error - import win32gui # pylint: disable=import-error + import win32con + import win32gui from aqt import mw @@ -1238,12 +1263,12 @@ def opengl_vendor() -> str | None: # Can't use versionFunctions there return None - vp = QOpenGLVersionProfile() # type: ignore # pylint: disable=undefined-variable + vp = QOpenGLVersionProfile() # type: ignore vp.setVersion(2, 0) try: vf = ctx.versionFunctions(vp) # type: ignore - except ImportError as e: + except ImportError: return None if vf is None: diff --git a/qt/aqt/webview.py b/qt/aqt/webview.py index 966d3de5a..95d84c00e 100644 --- a/qt/aqt/webview.py +++ b/qt/aqt/webview.py @@ -980,7 +980,6 @@ def _create_ankiwebview_subclass( /, **fixed_kwargs: Unpack[_AnkiWebViewKwargs], ) -> Type[AnkiWebView]: - def __init__(self, *args: Any, **kwargs: _AnkiWebViewKwargs) -> None: # user‑supplied kwargs override fixed kwargs merged = cast(_AnkiWebViewKwargs, {**fixed_kwargs, **kwargs}) diff --git a/qt/aqt/winpaths.py b/qt/aqt/winpaths.py index e53a47c06..8b2698739 100644 --- a/qt/aqt/winpaths.py +++ b/qt/aqt/winpaths.py @@ -100,7 +100,7 @@ _SHGetFolderPath.restype = _err_unless_zero def _get_path_buf(csidl): path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH) - result = _SHGetFolderPath(0, csidl, 0, 0, path_buf) + _SHGetFolderPath(0, csidl, 0, 0, path_buf) 
return path_buf.value diff --git a/qt/hatch_build.py b/qt/hatch_build.py index fc716a57f..aaf345842 100644 --- a/qt/hatch_build.py +++ b/qt/hatch_build.py @@ -67,11 +67,16 @@ class CustomBuildHook(BuildHookInterface): def _should_exclude(self, path: Path) -> bool: """Check if a file should be excluded from the wheel.""" - # Match the exclusions from write_wheel.py exclude_aqt function + path_str = str(path) + + # Exclude __pycache__ + if "/__pycache__/" in path_str: + return True + if path.suffix in [".ui", ".scss", ".map", ".ts"]: return True if path.name.startswith("tsconfig"): return True - if "/aqt/data" in str(path): + if "/aqt/data" in path_str: return True return False diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml index 45ca11e9b..32fb15991 100644 --- a/qt/launcher/Cargo.toml +++ b/qt/launcher/Cargo.toml @@ -11,10 +11,15 @@ rust-version.workspace = true anki_io.workspace = true anki_process.workspace = true anyhow.workspace = true +camino.workspace = true dirs.workspace = true +[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies] +libc.workspace = true + [target.'cfg(windows)'.dependencies] -winapi.workspace = true +windows.workspace = true +widestring.workspace = true libc.workspace = true libc-stdhandle.workspace = true @@ -22,5 +27,9 @@ libc-stdhandle.workspace = true name = "build_win" path = "src/bin/build_win.rs" +[[bin]] +name = "anki-console" +path = "src/bin/anki_console.rs" + [target.'cfg(windows)'.build-dependencies] embed-resource.workspace = true diff --git a/qt/launcher/addon/__init__.py b/qt/launcher/addon/__init__.py new file mode 100644 index 000000000..63a2cc5a9 --- /dev/null +++ b/qt/launcher/addon/__init__.py @@ -0,0 +1,192 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +from __future__ import annotations + +import contextlib +import os +import subprocess +import sys +from pathlib import Path +from typing import Any + +from 
anki.utils import pointVersion +from aqt import mw +from aqt.qt import QAction +from aqt.utils import askUser, is_mac, is_win, showInfo + + +def launcher_executable() -> str | None: + """Return the path to the Anki launcher executable.""" + return os.getenv("ANKI_LAUNCHER") + + +def uv_binary() -> str | None: + """Return the path to the uv binary.""" + return os.environ.get("ANKI_LAUNCHER_UV") + + +def launcher_root() -> str | None: + """Return the path to the launcher root directory (AnkiProgramFiles).""" + return os.environ.get("UV_PROJECT") + + +def venv_binary(cmd: str) -> str | None: + """Return the path to a binary in the launcher's venv.""" + root = launcher_root() + if not root: + return None + + root_path = Path(root) + if is_win: + binary_path = root_path / ".venv" / "Scripts" / cmd + else: + binary_path = root_path / ".venv" / "bin" / cmd + + return str(binary_path) + + +def add_python_requirements(reqs: list[str]) -> tuple[bool, str]: + """Add Python requirements to the launcher venv using uv add. 
+ + Returns (success, output)""" + + binary = uv_binary() + if not binary: + return (False, "Not in packaged build.") + + uv_cmd = [binary, "add"] + reqs + result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False) + + if result.returncode == 0: + root = launcher_root() + if root: + sync_marker = Path(root) / ".sync_complete" + sync_marker.touch() + return (True, result.stdout) + else: + return (False, result.stderr) + + +def trigger_launcher_run() -> None: + """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" + try: + root = launcher_root() + if not root: + return + + pyproject_path = Path(root) / "pyproject.toml" + + if pyproject_path.exists(): + # Touch the file to update its mtime + pyproject_path.touch() + except Exception as e: + print(e) + + +def update_and_restart() -> None: + """Update and restart Anki using the launcher.""" + launcher = launcher_executable() + assert launcher + + trigger_launcher_run() + + with contextlib.suppress(ResourceWarning): + env = os.environ.copy() + creationflags = 0 + if sys.platform == "win32": + creationflags = ( + subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS + ) + subprocess.Popen( + [launcher], + start_new_session=True, + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + creationflags=creationflags, + ) + + mw.app.quit() + + +def confirm_then_upgrade(): + if not askUser("Change to a different Anki version?"): + return + update_and_restart() + + +# return modified command array that points to bundled command, and return +# required environment +def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]: + cmd = cmd[:] + env = os.environ.copy() + # keep LD_LIBRARY_PATH when in snap environment + if "LD_LIBRARY_PATH" in env and "SNAP" not in env: + del env["LD_LIBRARY_PATH"] + + # Try to find binary in anki-audio package for Windows/Mac + if is_win or is_mac: + try: + import anki_audio + + 
audio_pkg_path = Path(anki_audio.__file__).parent + if is_win: + packaged_path = audio_pkg_path / (cmd[0] + ".exe") + else: # is_mac + packaged_path = audio_pkg_path / cmd[0] + + if packaged_path.exists(): + cmd[0] = str(packaged_path) + return cmd, env + except ImportError: + # anki-audio not available, fall back to old behavior + pass + + packaged_path = Path(sys.prefix) / cmd[0] + if packaged_path.exists(): + cmd[0] = str(packaged_path) + + return cmd, env + + +def on_addon_config(): + showInfo( + "This add-on is automatically added when installing older Anki versions, so that they work with the launcher. You can remove it if you wish." + ) + + +def setup(): + mw.addonManager.setConfigAction(__name__, on_addon_config) + + if pointVersion() >= 250600: + return + if not launcher_executable(): + return + + # Add action to tools menu + action = QAction("Upgrade/Downgrade", mw) + action.triggered.connect(confirm_then_upgrade) + mw.form.menuTools.addAction(action) + + # Monkey-patch audio tools to use anki-audio + if is_win or is_mac: + import aqt + import aqt.sound + + aqt.sound._packagedCmd = _packagedCmd + + # Inject launcher functions into launcher module + import aqt.package + + aqt.package.launcher_executable = launcher_executable + aqt.package.update_and_restart = update_and_restart + aqt.package.trigger_launcher_run = trigger_launcher_run + aqt.package.uv_binary = uv_binary + aqt.package.launcher_root = launcher_root + aqt.package.venv_binary = venv_binary + aqt.package.add_python_requirements = add_python_requirements + + +setup() diff --git a/qt/launcher/addon/manifest.json b/qt/launcher/addon/manifest.json new file mode 100644 index 000000000..b4f08e70d --- /dev/null +++ b/qt/launcher/addon/manifest.json @@ -0,0 +1,6 @@ +{ + "name": "Anki Launcher", + "package": "anki-launcher", + "min_point_version": 50, + "max_point_version": 250600 +} diff --git a/qt/launcher/mac/Info.plist b/qt/launcher/mac/Info.plist index 59b67605f..ac0ab2f09 100644 --- 
a/qt/launcher/mac/Info.plist +++ b/qt/launcher/mac/Info.plist @@ -7,7 +7,9 @@ CFBundleShortVersionString 1.0 LSMinimumSystemVersion - 11 + 12 + LSApplicationCategoryType + public.app-category.education CFBundleDocumentTypes diff --git a/qt/launcher/pyproject.toml b/qt/launcher/pyproject.toml index 2a45626c7..cc521b432 100644 --- a/qt/launcher/pyproject.toml +++ b/qt/launcher/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "anki-launcher" -version = "0.1.0" +version = "1.0.0" description = "UV-based launcher for Anki." requires-python = ">=3.9" dependencies = [ diff --git a/qt/launcher/src/bin/anki_console.rs b/qt/launcher/src/bin/anki_console.rs new file mode 100644 index 000000000..181db920f --- /dev/null +++ b/qt/launcher/src/bin/anki_console.rs @@ -0,0 +1,58 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#![windows_subsystem = "console"] + +use std::env; +use std::io::stdin; +use std::process::Command; + +use anyhow::Context; +use anyhow::Result; + +fn main() { + if let Err(e) = run() { + eprintln!("Error: {e:#}"); + std::process::exit(1); + } +} + +fn run() -> Result<()> { + let current_exe = env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let anki_exe = exe_dir.join("anki.exe"); + + if !anki_exe.exists() { + anyhow::bail!("anki.exe not found in the same directory"); + } + + // Forward all command line arguments to anki.exe + let args: Vec = env::args().skip(1).collect(); + + let mut cmd = Command::new(&anki_exe); + cmd.args(&args); + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_err() { + // if directly invoked by the user, signal the launcher that the + // user wants a Python console + std::env::set_var("ANKI_CONSOLE", "1"); + } + + // Wait for the process to complete and forward its exit code + let status = cmd.status().context("Failed to execute 
anki.exe")?; + if !status.success() { + println!("\nPress enter to close."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } + + if let Some(code) = status.code() { + std::process::exit(code); + } else { + // Process was terminated by a signal + std::process::exit(1); + } +} diff --git a/qt/launcher/src/bin/build_win.rs b/qt/launcher/src/bin/build_win.rs index ff385d9ea..fc9082bf2 100644 --- a/qt/launcher/src/bin/build_win.rs +++ b/qt/launcher/src/bin/build_win.rs @@ -114,6 +114,12 @@ fn copy_files(output_dir: &Path) -> Result<()> { let launcher_dst = output_dir.join("anki.exe"); copy_file(&launcher_src, &launcher_dst)?; + // Copy anki-console binary + let console_src = + PathBuf::from(CARGO_TARGET_DIR).join("x86_64-pc-windows-msvc/release/anki-console.exe"); + let console_dst = output_dir.join("anki-console.exe"); + copy_file(&console_src, &console_dst)?; + // Copy uv.exe and uvw.exe let uv_src = PathBuf::from("../../../out/extracted/uv/uv.exe"); let uv_dst = output_dir.join("uv.exe"); @@ -133,14 +139,12 @@ fn copy_files(output_dir: &Path) -> Result<()> { output_dir.join(".python-version"), )?; - // Copy anki-console.bat - copy_file("anki-console.bat", output_dir.join("anki-console.bat"))?; - Ok(()) } fn sign_binaries(output_dir: &Path) -> Result<()> { sign_file(&output_dir.join("anki.exe"))?; + sign_file(&output_dir.join("anki-console.exe"))?; sign_file(&output_dir.join("uv.exe"))?; Ok(()) } @@ -217,7 +221,7 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> { // Convert to Windows-style backslashes for NSIS let windows_path = relative_path.display().to_string().replace('/', "\\"); // Use Windows line endings (\r\n) as expected by NSIS - manifest_content.push_str(&format!("{}\r\n", windows_path)); + manifest_content.push_str(&format!("{windows_path}\r\n")); } } } diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs index 2ad3ac00c..5c06aadf7 100644 --- a/qt/launcher/src/main.rs +++ b/qt/launcher/src/main.rs @@ 
-10,27 +10,45 @@ use std::process::Command; use std::time::SystemTime; use std::time::UNIX_EPOCH; +use anki_io::copy_file; use anki_io::copy_if_newer; use anki_io::create_dir_all; use anki_io::modified_time; use anki_io::read_file; use anki_io::remove_file; use anki_io::write_file; -use anki_process::CommandExt; +use anki_io::ToUtf8Path; +use anki_process::CommandExt as AnkiCommandExt; use anyhow::Context; use anyhow::Result; +use crate::platform::ensure_os_supported; use crate::platform::ensure_terminal_shown; -use crate::platform::exec_anki; -use crate::platform::get_anki_binary_path; use crate::platform::get_exe_and_resources_dirs; use crate::platform::get_uv_binary_name; -use crate::platform::handle_first_launch; -use crate::platform::initial_terminal_setup; -use crate::platform::launch_anki_detached; +use crate::platform::launch_anki_normally; +use crate::platform::respawn_launcher; mod platform; +struct State { + current_version: Option, + prerelease_marker: std::path::PathBuf, + uv_install_root: std::path::PathBuf, + uv_cache_dir: std::path::PathBuf, + no_cache_marker: std::path::PathBuf, + anki_base_folder: std::path::PathBuf, + uv_path: std::path::PathBuf, + uv_python_install_dir: std::path::PathBuf, + user_pyproject_path: std::path::PathBuf, + user_python_version_path: std::path::PathBuf, + dist_pyproject_path: std::path::PathBuf, + dist_python_version_path: std::path::PathBuf, + uv_lock_path: std::path::PathBuf, + sync_complete_marker: std::path::PathBuf, + previous_version: Option, +} + #[derive(Debug, Clone)] pub enum VersionKind { PyOxidizer(String), @@ -43,20 +61,14 @@ pub enum MainMenuChoice { KeepExisting, Version(VersionKind), ToggleBetas, + ToggleCache, + Uninstall, Quit, } -#[derive(Debug, Clone, Default)] -pub struct Config { - pub show_console: bool, -} - fn main() { if let Err(e) = run() { - let mut config: Config = Config::default(); - initial_terminal_setup(&mut config); - - eprintln!("Error: {:#}", e); + eprintln!("Error: {e:#}"); 
eprintln!("Press enter to close..."); let mut input = String::new(); let _ = stdin().read_line(&mut input); @@ -66,58 +78,154 @@ fn main() { } fn run() -> Result<()> { - let mut config: Config = Config::default(); - let uv_install_root = dirs::data_local_dir() .context("Unable to determine data_dir")? .join("AnkiProgramFiles"); - let sync_complete_marker = uv_install_root.join(".sync_complete"); - let prerelease_marker = uv_install_root.join("prerelease"); let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?; - let dist_pyproject_path = resources_dir.join("pyproject.toml"); - let user_pyproject_path = uv_install_root.join("pyproject.toml"); - let dist_python_version_path = resources_dir.join(".python-version"); - let user_python_version_path = uv_install_root.join(".python-version"); - let uv_lock_path = uv_install_root.join("uv.lock"); - let uv_path: std::path::PathBuf = exe_dir.join(get_uv_binary_name()); - // Create install directory and copy project files in - create_dir_all(&uv_install_root)?; - let had_user_pyproj = user_pyproject_path.exists(); - if !had_user_pyproj { - // during initial launcher testing, enable betas by default - write_file(&prerelease_marker, "")?; + let mut state = State { + current_version: None, + prerelease_marker: uv_install_root.join("prerelease"), + uv_install_root: uv_install_root.clone(), + uv_cache_dir: uv_install_root.join("cache"), + no_cache_marker: uv_install_root.join("nocache"), + anki_base_folder: get_anki_base_path()?, + uv_path: exe_dir.join(get_uv_binary_name()), + uv_python_install_dir: uv_install_root.join("python"), + user_pyproject_path: uv_install_root.join("pyproject.toml"), + user_python_version_path: uv_install_root.join(".python-version"), + dist_pyproject_path: resources_dir.join("pyproject.toml"), + dist_python_version_path: resources_dir.join(".python-version"), + uv_lock_path: uv_install_root.join("uv.lock"), + sync_complete_marker: uv_install_root.join(".sync_complete"), + previous_version: None, + 
}; + + // Check for uninstall request from Windows uninstaller + if std::env::var("ANKI_LAUNCHER_UNINSTALL").is_ok() { + ensure_terminal_shown()?; + handle_uninstall(&state)?; + return Ok(()); } - copy_if_newer(&dist_pyproject_path, &user_pyproject_path)?; - copy_if_newer(&dist_python_version_path, &user_python_version_path)?; + // Create install directory and copy project files in + create_dir_all(&state.uv_install_root)?; - let pyproject_has_changed = !sync_complete_marker.exists() || { - let pyproject_toml_time = modified_time(&user_pyproject_path)?; - let sync_complete_time = modified_time(&sync_complete_marker)?; + copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?; + copy_if_newer( + &state.dist_python_version_path, + &state.user_python_version_path, + )?; + + let pyproject_has_changed = !state.sync_complete_marker.exists() || { + let pyproject_toml_time = modified_time(&state.user_pyproject_path)?; + let sync_complete_time = modified_time(&state.sync_complete_marker)?; Ok::(pyproject_toml_time > sync_complete_time) } .unwrap_or(true); if !pyproject_has_changed { - // If venv is already up to date, exec as normal - initial_terminal_setup(&mut config); - let anki_bin = get_anki_binary_path(&uv_install_root); - exec_anki(&anki_bin, &config)?; + // If venv is already up to date, launch Anki normally + let args: Vec = std::env::args().skip(1).collect(); + let cmd = build_python_command(&state, &args)?; + launch_anki_normally(cmd)?; return Ok(()); } - // we'll need to launch uv; reinvoke ourselves in a terminal so the user can see + // If we weren't in a terminal, respawn ourselves in one ensure_terminal_shown()?; + print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top println!("\x1B[1mAnki Launcher\x1B[0m\n"); - // Check if there's an existing installation before removing marker - let has_existing_install = sync_complete_marker.exists(); + ensure_os_supported()?; + check_versions(&mut state); + + main_menu_loop(&state)?; + + // Write 
marker file to indicate we've completed the sync process + write_sync_marker(&state.sync_complete_marker)?; + + #[cfg(target_os = "macos")] + { + let cmd = build_python_command(&state, &[])?; + platform::mac::prepare_for_launch_after_update(cmd, &uv_install_root)?; + } + + if cfg!(unix) && !cfg!(target_os = "macos") { + println!("\nPress enter to start Anki."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } else { + // on Windows/macOS, the user needs to close the terminal/console + // currently, but ideas on how we can avoid this would be good! + println!(); + println!("Anki will start shortly."); + println!("\x1B[1mYou can close this window.\x1B[0m\n"); + } + + // respawn the launcher as a disconnected subprocess for normal startup + respawn_launcher()?; + + Ok(()) +} + +fn extract_aqt_version( + uv_path: &std::path::Path, + uv_install_root: &std::path::Path, +) -> Option { + let output = Command::new(uv_path) + .current_dir(uv_install_root) + .args(["pip", "show", "aqt"]) + .output() + .ok()?; + + if !output.status.success() { + return None; + } + + let stdout = String::from_utf8(output.stdout).ok()?; + for line in stdout.lines() { + if let Some(version) = line.strip_prefix("Version: ") { + return Some(version.trim().to_string()); + } + } + None +} + +fn check_versions(state: &mut State) { + // If sync_complete_marker is missing, do nothing + if !state.sync_complete_marker.exists() { + return; + } + + // Determine current version by invoking uv pip show aqt + match extract_aqt_version(&state.uv_path, &state.uv_install_root) { + Some(version) => { + state.current_version = Some(version); + } + None => { + println!("Warning: Could not determine current Anki version"); + } + } + + // Read previous version from "previous-version" file + let previous_version_path = state.uv_install_root.join("previous-version"); + if let Ok(content) = read_file(&previous_version_path) { + if let Ok(version_str) = String::from_utf8(content) { + let 
version = version_str.trim().to_string(); + if !version.is_empty() { + state.previous_version = Some(version); + } + } + } +} + +fn main_menu_loop(state: &State) -> Result<()> { loop { - let menu_choice = get_main_menu_choice(has_existing_install, &prerelease_marker); + let menu_choice = get_main_menu_choice(state); match menu_choice { MainMenuChoice::Quit => std::process::exit(0), @@ -127,63 +235,131 @@ fn run() -> Result<()> { } MainMenuChoice::ToggleBetas => { // Toggle beta prerelease file - if prerelease_marker.exists() { - let _ = remove_file(&prerelease_marker); + if state.prerelease_marker.exists() { + let _ = remove_file(&state.prerelease_marker); println!("Beta releases disabled."); } else { - write_file(&prerelease_marker, "")?; + write_file(&state.prerelease_marker, "")?; println!("Beta releases enabled."); } println!(); continue; } - _ => { + MainMenuChoice::ToggleCache => { + // Toggle cache disable file + if state.no_cache_marker.exists() { + let _ = remove_file(&state.no_cache_marker); + println!("Download caching enabled."); + } else { + write_file(&state.no_cache_marker, "")?; + // Delete the cache directory and everything in it + if state.uv_cache_dir.exists() { + let _ = anki_io::remove_dir_all(&state.uv_cache_dir); + } + println!("Download caching disabled and cache cleared."); + } + println!(); + continue; + } + MainMenuChoice::Uninstall => { + if handle_uninstall(state)? 
{ + std::process::exit(0); + } + continue; + } + choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => { // For other choices, update project files and sync update_pyproject_for_version( - menu_choice.clone(), - dist_pyproject_path.clone(), - user_pyproject_path.clone(), - dist_python_version_path.clone(), - user_python_version_path.clone(), + choice.clone(), + state.dist_pyproject_path.clone(), + state.user_pyproject_path.clone(), + state.dist_python_version_path.clone(), + state.user_python_version_path.clone(), )?; + // Extract current version before syncing (but don't write to file yet) + let previous_version_to_save = + extract_aqt_version(&state.uv_path, &state.uv_install_root); + // Remove sync marker before attempting sync - let _ = remove_file(&sync_complete_marker); - - // Sync the venv - let mut command = Command::new(&uv_path); - command.current_dir(&uv_install_root).args([ - "sync", - "--upgrade", - "--managed-python", - ]); - - // Add python version if .python-version file exists - if user_python_version_path.exists() { - let python_version = read_file(&user_python_version_path)?; - let python_version_str = String::from_utf8(python_version) - .context("Invalid UTF-8 in .python-version")?; - let python_version_trimmed = python_version_str.trim(); - command.args(["--python", python_version_trimmed]); - } - - // Set UV_PRERELEASE=allow if beta mode is enabled - if prerelease_marker.exists() { - command.env("UV_PRERELEASE", "allow"); - } + let _ = remove_file(&state.sync_complete_marker); println!("\x1B[1mUpdating Anki...\x1B[0m\n"); + let python_version_trimmed = if state.user_python_version_path.exists() { + let python_version = read_file(&state.user_python_version_path)?; + let python_version_str = String::from_utf8(python_version) + .context("Invalid UTF-8 in .python-version")?; + Some(python_version_str.trim().to_string()) + } else { + None + }; + + // `uv sync` does not pull in Python automatically, unlike `uv run`. 
+ // This might be system/platform specific and/or a uv bug. + let mut command = Command::new(&state.uv_path); + command + .current_dir(&state.uv_install_root) + .env("UV_CACHE_DIR", &state.uv_cache_dir) + .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir) + .args(["python", "install", "--managed-python"]); + + // Add python version if .python-version file exists + if let Some(version) = &python_version_trimmed { + command.args([version]); + } + + if let Err(e) = command.ensure_success() { + println!("Python install failed: {e:#}"); + println!(); + continue; + } + + // Sync the venv + let mut command = Command::new(&state.uv_path); + command + .current_dir(&state.uv_install_root) + .env("UV_CACHE_DIR", &state.uv_cache_dir) + .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir) + .args(["sync", "--upgrade", "--managed-python"]); + + // Add python version if .python-version file exists + if let Some(version) = &python_version_trimmed { + command.args(["--python", version]); + } + + // Set UV_PRERELEASE=allow if beta mode is enabled + if state.prerelease_marker.exists() { + command.env("UV_PRERELEASE", "allow"); + } + + if state.no_cache_marker.exists() { + command.env("UV_NO_CACHE", "1"); + } + match command.ensure_success() { Ok(_) => { - // Sync succeeded, break out of loop + // Sync succeeded + if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) { + inject_helper_addon(&state.uv_install_root)?; + } + + // Now that sync succeeded, save the previous version + if let Some(current_version) = previous_version_to_save { + let previous_version_path = + state.uv_install_root.join("previous-version"); + if let Err(e) = write_file(&previous_version_path, ¤t_version) { + println!("Warning: Could not save previous version: {e}"); + } + } + break; } Err(e) => { // If sync fails due to things like a missing wheel on pypi, // we need to remove the lockfile or uv will cache the bad result. 
- let _ = remove_file(&uv_lock_path); - println!("Install failed: {:#}", e); + let _ = remove_file(&state.uv_lock_path); + println!("Install failed: {e:#}"); println!(); continue; } @@ -191,22 +367,6 @@ fn run() -> Result<()> { } } } - - // Write marker file to indicate we've completed the sync process - write_sync_marker(&sync_complete_marker)?; - - // First launch - let anki_bin = get_anki_binary_path(&uv_install_root); - handle_first_launch(&anki_bin)?; - - println!("\nPress enter to start Anki."); - - let mut input = String::new(); - let _ = stdin().read_line(&mut input); - - // Then launch the binary as detached subprocess so the terminal can close - launch_anki_detached(&anki_bin, &config)?; - Ok(()) } @@ -219,24 +379,33 @@ fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> { Ok(()) } -fn get_main_menu_choice( - has_existing_install: bool, - prerelease_marker: &std::path::Path, -) -> MainMenuChoice { +fn get_main_menu_choice(state: &State) -> MainMenuChoice { loop { println!("1) Latest Anki (just press enter)"); println!("2) Choose a version"); - if has_existing_install { - println!("3) Keep existing version"); + if let Some(current_version) = &state.current_version { + println!("3) Keep existing version ({current_version})"); + } + if let Some(prev_version) = &state.previous_version { + if state.current_version.as_ref() != Some(prev_version) { + println!("4) Revert to previous version ({prev_version})"); + } } println!(); - let betas_enabled = prerelease_marker.exists(); + let betas_enabled = state.prerelease_marker.exists(); println!( - "4) Allow betas: {}", + "5) Allow betas: {}", if betas_enabled { "on" } else { "off" } ); - println!("5) Quit"); + let cache_enabled = !state.no_cache_marker.exists(); + println!( + "6) Cache downloads: {}", + if cache_enabled { "on" } else { "off" } + ); + println!(); + println!("7) Uninstall"); + println!("8) Quit"); print!("> "); let _ = stdout().flush(); @@ -250,15 +419,28 @@ fn 
get_main_menu_choice( "" | "1" => MainMenuChoice::Latest, "2" => MainMenuChoice::Version(get_version_kind()), "3" => { - if has_existing_install { + if state.current_version.is_some() { MainMenuChoice::KeepExisting } else { println!("Invalid input. Please try again.\n"); continue; } } - "4" => MainMenuChoice::ToggleBetas, - "5" => MainMenuChoice::Quit, + "4" => { + if let Some(prev_version) = &state.previous_version { + if state.current_version.as_ref() != Some(prev_version) { + if let Some(version_kind) = parse_version_kind(prev_version) { + return MainMenuChoice::Version(version_kind); + } + } + } + println!("Invalid input. Please try again.\n"); + continue; + } + "5" => MainMenuChoice::ToggleBetas, + "6" => MainMenuChoice::ToggleCache, + "7" => MainMenuChoice::Uninstall, + "8" => MainMenuChoice::Quit, _ => { println!("Invalid input. Please try again."); continue; @@ -288,7 +470,7 @@ fn get_version_kind() -> VersionKind { return version_kind; } None => { - println!("Invalid version format. Please enter a version like 24.10 or 25.06.1 (minimum 2.1.50)"); + println!("Invalid version format. 
Please enter a version like 25.07.1 or 24.11 (minimum 2.1.50)"); continue; } } @@ -313,8 +495,13 @@ fn update_pyproject_for_version( // Do nothing - keep existing pyproject.toml and .python-version } MainMenuChoice::ToggleBetas => { - // This should not be reached as ToggleBetas is handled in the loop - unreachable!("ToggleBetas should be handled in the main loop"); + unreachable!(); + } + MainMenuChoice::ToggleCache => { + unreachable!(); + } + MainMenuChoice::Uninstall => { + unreachable!(); } MainMenuChoice::Version(version_kind) => { let content = read_file(&dist_pyproject_path)?; @@ -328,6 +515,7 @@ fn update_pyproject_for_version( &format!( concat!( "aqt[qt6]=={}\",\n", + " \"anki-audio==0.1.0; sys.platform == 'win32' or sys.platform == 'darwin'\",\n", " \"pyqt6==6.6.1\",\n", " \"pyqt6-qt6==6.6.2\",\n", " \"pyqt6-webengine==6.6.0\",\n", @@ -339,7 +527,7 @@ fn update_pyproject_for_version( ) } VersionKind::Uv(version) => { - content_str.replace("anki-release", &format!("anki-release=={}", version)) + content_str.replace("anki-release", &format!("anki-release=={version}")) } }; write_file(&user_pyproject_path, &updated_content)?; @@ -350,7 +538,7 @@ fn update_pyproject_for_version( write_file(&user_python_version_path, "3.9")?; } VersionKind::Uv(_) => { - copy_if_newer(&dist_python_version_path, &user_python_version_path)?; + copy_file(&dist_python_version_path, &user_python_version_path)?; } } } @@ -403,3 +591,138 @@ fn parse_version_kind(version: &str) -> Option { Some(VersionKind::Uv(version.to_string())) } } + +fn inject_helper_addon(_uv_install_root: &std::path::Path) -> Result<()> { + let addons21_path = get_anki_addons21_path()?; + + if !addons21_path.exists() { + return Ok(()); + } + + let addon_folder = addons21_path.join("anki-launcher"); + + // Remove existing anki-launcher folder if it exists + if addon_folder.exists() { + anki_io::remove_dir_all(&addon_folder)?; + } + + // Create the anki-launcher folder + create_dir_all(&addon_folder)?; + + // 
Write the embedded files + let init_py_content = include_str!("../addon/__init__.py"); + let manifest_json_content = include_str!("../addon/manifest.json"); + + write_file(addon_folder.join("__init__.py"), init_py_content)?; + write_file(addon_folder.join("manifest.json"), manifest_json_content)?; + + Ok(()) +} + +fn get_anki_base_path() -> Result<std::path::PathBuf> { + let anki_base_path = if cfg!(target_os = "windows") { + // Windows: %APPDATA%\Anki2 + dirs::config_dir() + .context("Unable to determine config directory")? + .join("Anki2") + } else if cfg!(target_os = "macos") { + // macOS: ~/Library/Application Support/Anki2 + dirs::data_dir() + .context("Unable to determine data directory")? + .join("Anki2") + } else { + // Linux: ~/.local/share/Anki2 + dirs::data_dir() + .context("Unable to determine data directory")? + .join("Anki2") + }; + + Ok(anki_base_path) +} + +fn get_anki_addons21_path() -> Result<std::path::PathBuf> { + Ok(get_anki_base_path()?.join("addons21")) +} + +fn handle_uninstall(state: &State) -> Result<bool> { + println!("Uninstall Anki's program files? (y/n)"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim().to_lowercase(); + + if input != "y" { + println!("Uninstall cancelled."); + println!(); + return Ok(false); + } + + // Remove program files + if state.uv_install_root.exists() { + anki_io::remove_dir_all(&state.uv_install_root)?; + println!("Program files removed."); + } + + println!(); + println!("Remove all profiles/cards? 
(y/n)"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim().to_lowercase(); + + if input == "y" && state.anki_base_folder.exists() { + anki_io::remove_dir_all(&state.anki_base_folder)?; + println!("User data removed."); + } + + println!(); + + // Platform-specific messages + #[cfg(target_os = "macos")] + platform::mac::finalize_uninstall(); + + #[cfg(target_os = "windows")] + platform::windows::finalize_uninstall(); + + #[cfg(all(unix, not(target_os = "macos")))] + platform::unix::finalize_uninstall(); + + Ok(true) +} + +fn build_python_command(state: &State, args: &[String]) -> Result<Command> { + let python_exe = if cfg!(target_os = "windows") { + let show_console = std::env::var("ANKI_CONSOLE").is_ok(); + if show_console { + state.uv_install_root.join(".venv/Scripts/python.exe") + } else { + state.uv_install_root.join(".venv/Scripts/pythonw.exe") + } + } else { + state.uv_install_root.join(".venv/bin/python") + }; + + let mut cmd = Command::new(&python_exe); + cmd.args(["-c", "import aqt, sys; sys.argv[0] = 'Anki'; aqt.run()"]); + cmd.args(args); + // tell the Python code it was invoked by the launcher, and updating is + // available + cmd.env("ANKI_LAUNCHER", std::env::current_exe()?.utf8()?.as_str()); + + // Set UV and Python paths for the Python code + let (exe_dir, _) = get_exe_and_resources_dirs()?; + let uv_path = exe_dir.join(get_uv_binary_name()); + cmd.env("ANKI_LAUNCHER_UV", uv_path.utf8()?.as_str()); + cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str()); + + // Set UV_PRERELEASE=allow if beta mode is enabled + if state.prerelease_marker.exists() { + cmd.env("UV_PRERELEASE", "allow"); + } + + Ok(cmd) +} diff --git a/qt/launcher/src/platform/mac.rs b/qt/launcher/src/platform/mac.rs index 2369f538a..f97d7fd07 100644 --- a/qt/launcher/src/platform/mac.rs +++ b/qt/launcher/src/platform/mac.rs @@ -1,7 +1,9 @@ // Copyright: Ankitects Pty Ltd and contributors // 
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::os::unix::process::CommandExt; +use std::io; +use std::io::Write; +use std::path::Path; use std::process::Command; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; @@ -13,45 +15,7 @@ use anki_process::CommandExt as AnkiCommandExt; use anyhow::Context; use anyhow::Result; -// Re-export Unix functions that macOS uses -pub use super::unix::{ - ensure_terminal_shown, - exec_anki, - get_anki_binary_path, - initial_terminal_setup, -}; - -pub fn launch_anki_detached(anki_bin: &std::path::Path, _config: &crate::Config) -> Result<()> { - use std::process::Stdio; - - let child = Command::new(anki_bin) - .stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .process_group(0) - .ensure_spawn()?; - std::mem::forget(child); - - println!("Anki will start shortly."); - println!("\x1B[1mYou can close this window.\x1B[0m\n"); - Ok(()) -} - -pub fn relaunch_in_terminal() -> Result<()> { - let current_exe = std::env::current_exe().context("Failed to get current executable path")?; - Command::new("open") - .args(["-a", "Terminal"]) - .arg(current_exe) - .ensure_spawn()?; - std::process::exit(0); -} - -pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { - use std::io::Write; - use std::io::{ - self, - }; - +pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<()> { // Pre-validate by running --version to trigger any Gatekeeper checks print!("\n\x1B[1mThis may take a few minutes. 
Please wait\x1B[0m"); io::stdout().flush().unwrap(); @@ -67,13 +31,27 @@ pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { } }); - let _ = Command::new(anki_bin) + let _ = cmd .env("ANKI_FIRST_RUN", "1") .arg("--version") .stdout(std::process::Stdio::null()) .stderr(std::process::Stdio::null()) .ensure_success(); + if cfg!(target_os = "macos") { + // older Anki versions had a short mpv timeout and didn't support + // ANKI_FIRST_RUN, so we need to ensure mpv passes Gatekeeper + // validation prior to launch + let mpv_path = root.join(".venv/lib/python3.9/site-packages/anki_audio/mpv"); + if mpv_path.exists() { + let _ = Command::new(&mpv_path) + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .ensure_success(); + } + } + // Stop progress indicator running.store(false, Ordering::Relaxed); progress_thread.join().unwrap(); @@ -81,22 +59,40 @@ pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { Ok(()) } -pub fn get_exe_and_resources_dirs() -> Result<(std::path::PathBuf, std::path::PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? - .to_owned(); - - let resources_dir = exe_dir - .parent() - .context("Failed to get parent directory")? 
- .join("Resources"); - - Ok((exe_dir, resources_dir)) +pub fn relaunch_in_terminal() -> Result<()> { + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + Command::new("open") + .args(["-a", "Terminal"]) + .arg(current_exe) + .ensure_spawn()?; + std::process::exit(0); } -pub fn get_uv_binary_name() -> &'static str { - // macOS uses standard uv binary name - "uv" +pub fn finalize_uninstall() { + if let Ok(exe_path) = std::env::current_exe() { + // Find the .app bundle by walking up the directory tree + let mut app_bundle_path = exe_path.as_path(); + while let Some(parent) = app_bundle_path.parent() { + if let Some(name) = parent.file_name() { + if name.to_string_lossy().ends_with(".app") { + let result = Command::new("trash").arg(parent).output(); + + match result { + Ok(output) if output.status.success() => { + println!("Anki has been uninstalled."); + return; + } + _ => { + // Fall back to manual instructions + println!( + "Please manually drag Anki.app to the trash to complete uninstall." 
+ ); + } + } + return; + } + } + app_bundle_path = parent; + } + } } diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs index bb7208abe..50a303656 100644 --- a/qt/launcher/src/platform/mod.rs +++ b/qt/launcher/src/platform/mod.rs @@ -1,18 +1,137 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -#[cfg(unix)] -mod unix; - -#[cfg(target_os = "macos")] -mod mac; - -#[cfg(target_os = "windows")] -mod windows; - -#[cfg(target_os = "macos")] -pub use mac::*; #[cfg(all(unix, not(target_os = "macos")))] -pub use unix::*; +pub mod unix; + +#[cfg(target_os = "macos")] +pub mod mac; + #[cfg(target_os = "windows")] -pub use windows::*; +pub mod windows; + +use std::path::PathBuf; + +use anki_process::CommandExt; +use anyhow::Context; +use anyhow::Result; + +pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { + let exe_dir = std::env::current_exe() + .context("Failed to get current executable path")? + .parent() + .context("Failed to get executable directory")? + .to_owned(); + + let resources_dir = if cfg!(target_os = "macos") { + // On macOS, resources are in ../Resources relative to the executable + exe_dir + .parent() + .context("Failed to get parent directory")? 
+ .join("Resources") + } else { + // On other platforms, resources are in the same directory as executable + exe_dir.clone() + }; + + Ok((exe_dir, resources_dir)) +} + +pub fn get_uv_binary_name() -> &'static str { + if cfg!(target_os = "windows") { + "uv.exe" + } else if cfg!(target_os = "macos") { + "uv" + } else if cfg!(target_arch = "x86_64") { + "uv.amd64" + } else { + "uv.arm64" + } +} + +pub fn respawn_launcher() -> Result<()> { + use std::process::Stdio; + + let mut launcher_cmd = if cfg!(target_os = "macos") { + // On macOS, we need to launch the .app bundle, not the executable directly + let current_exe = + std::env::current_exe().context("Failed to get current executable path")?; + + // Navigate from Contents/MacOS/launcher to the .app bundle + let app_bundle = current_exe + .parent() // MacOS + .and_then(|p| p.parent()) // Contents + .and_then(|p| p.parent()) // .app + .context("Failed to find .app bundle")?; + + let mut cmd = std::process::Command::new("open"); + cmd.arg(app_bundle); + cmd + } else { + let current_exe = + std::env::current_exe().context("Failed to get current executable path")?; + std::process::Command::new(current_exe) + }; + + launcher_cmd + .stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()); + + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; + const DETACHED_PROCESS: u32 = 0x00000008; + launcher_cmd.creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS); + } + + #[cfg(all(unix, not(target_os = "macos")))] + { + use std::os::unix::process::CommandExt; + launcher_cmd.process_group(0); + } + + let child = launcher_cmd.ensure_spawn()?; + std::mem::forget(child); + + Ok(()) +} + +pub fn launch_anki_normally(mut cmd: std::process::Command) -> Result<()> { + #[cfg(windows)] + { + crate::platform::windows::prepare_to_launch_normally(); + cmd.ensure_success()?; + } + #[cfg(unix)] + cmd.ensure_exec()?; + Ok(()) +} + +#[cfg(windows)] +pub use 
windows::ensure_terminal_shown; + +#[cfg(unix)] +pub fn ensure_terminal_shown() -> Result<()> { + use std::io::IsTerminal; + + let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); + if !stdout_is_terminal { + #[cfg(target_os = "macos")] + mac::relaunch_in_terminal()?; + #[cfg(not(target_os = "macos"))] + unix::relaunch_in_terminal()?; + } + + // Set terminal title to "Anki Launcher" + print!("\x1b]2;Anki Launcher\x07"); + Ok(()) +} + +pub fn ensure_os_supported() -> Result<()> { + #[cfg(all(unix, not(target_os = "macos")))] + unix::ensure_glibc_supported()?; + + Ok(()) +} diff --git a/qt/launcher/src/platform/unix.rs b/qt/launcher/src/platform/unix.rs index 324bf5aa3..0df33838f 100644 --- a/qt/launcher/src/platform/unix.rs +++ b/qt/launcher/src/platform/unix.rs @@ -1,36 +1,11 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -#![allow(dead_code)] - -use std::io::IsTerminal; -use std::path::PathBuf; use std::process::Command; -use anki_process::CommandExt as AnkiCommandExt; use anyhow::Context; use anyhow::Result; -use crate::Config; - -pub fn initial_terminal_setup(_config: &mut Config) { - // No special terminal setup needed on Unix -} - -pub fn ensure_terminal_shown() -> Result<()> { - let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); - if !stdout_is_terminal { - // If launched from GUI, try to relaunch in a terminal - crate::platform::relaunch_in_terminal()?; - } - - // Set terminal title to "Anki Launcher" - print!("\x1b]2;Anki Launcher\x07"); - - Ok(()) -} - -#[cfg(not(target_os = "macos"))] pub fn relaunch_in_terminal() -> Result<()> { let current_exe = std::env::current_exe().context("Failed to get current executable path")?; @@ -73,51 +48,51 @@ pub fn relaunch_in_terminal() -> Result<()> { Ok(()) } -pub fn get_anki_binary_path(uv_install_root: &std::path::Path) -> PathBuf { - uv_install_root.join(".venv/bin/anki") +pub fn 
finalize_uninstall() { + use std::io::stdin; + use std::io::stdout; + use std::io::Write; + + let uninstall_script = std::path::Path::new("/usr/local/share/anki/uninstall.sh"); + + if uninstall_script.exists() { + println!("To finish uninstalling, run 'sudo /usr/local/share/anki/uninstall.sh'"); + } else { + println!("Anki has been uninstalled."); + } + println!("Press enter to quit."); + let _ = stdout().flush(); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); } -pub fn launch_anki_detached(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - // On non-macOS Unix systems, we don't need to detach since we never spawned a - // terminal - exec_anki(anki_bin, config) -} +pub fn ensure_glibc_supported() -> Result<()> { + use std::ffi::CStr; + let get_glibc_version = || -> Option<(u32, u32)> { + let version_ptr = unsafe { libc::gnu_get_libc_version() }; + if version_ptr.is_null() { + return None; + } + + let version_cstr = unsafe { CStr::from_ptr(version_ptr) }; + let version_str = version_cstr.to_str().ok()?; + + // Parse version string (format: "2.36" or "2.36.1") + let version_parts: Vec<&str> = version_str.split('.').collect(); + if version_parts.len() < 2 { + return None; + } + + let major: u32 = version_parts[0].parse().ok()?; + let minor: u32 = version_parts[1].parse().ok()?; + + Some((major, minor)) + }; + + let (major, minor) = get_glibc_version().unwrap_or_default(); + if major < 2 || (major == 2 && minor < 36) { + anyhow::bail!("Anki requires a modern Linux distro with glibc 2.36 or later."); + } -pub fn handle_first_launch(_anki_bin: &std::path::Path) -> Result<()> { - // No special first launch handling needed for generic Unix systems Ok(()) } - -pub fn exec_anki(anki_bin: &std::path::Path, _config: &Config) -> Result<()> { - let args: Vec = std::env::args().skip(1).collect(); - Command::new(anki_bin) - .args(args) - .ensure_exec() - .map_err(anyhow::Error::new) -} - -pub fn get_exe_and_resources_dirs() -> 
Result<(PathBuf, PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? - .to_owned(); - - // On generic Unix systems, assume resources are in the same directory as - // executable - let resources_dir = exe_dir.clone(); - - Ok((exe_dir, resources_dir)) -} - -pub fn get_uv_binary_name() -> &'static str { - // Use architecture-specific uv binary for non-Mac Unix systems - if cfg!(target_arch = "x86_64") { - "uv.amd64" - } else if cfg!(target_arch = "aarch64") { - "uv.arm64" - } else { - // Fallback to generic uv for other architectures - "uv" - } -} diff --git a/qt/launcher/src/platform/windows.rs b/qt/launcher/src/platform/windows.rs index 4e3752d44..3c060a9de 100644 --- a/qt/launcher/src/platform/windows.rs +++ b/qt/launcher/src/platform/windows.rs @@ -1,82 +1,84 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::path::PathBuf; +use std::io::stdin; use std::process::Command; -use anki_process::CommandExt; use anyhow::Context; use anyhow::Result; -use winapi::um::consoleapi; -use winapi::um::errhandlingapi; -use winapi::um::wincon; - -use crate::Config; +use widestring::u16cstr; +use windows::core::PCWSTR; +use windows::Win32::System::Console::AttachConsole; +use windows::Win32::System::Console::GetConsoleWindow; +use windows::Win32::System::Console::ATTACH_PARENT_PROCESS; +use windows::Win32::System::Registry::RegCloseKey; +use windows::Win32::System::Registry::RegOpenKeyExW; +use windows::Win32::System::Registry::RegQueryValueExW; +use windows::Win32::System::Registry::HKEY; +use windows::Win32::System::Registry::HKEY_CURRENT_USER; +use windows::Win32::System::Registry::KEY_READ; +use windows::Win32::System::Registry::REG_SZ; +use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID; pub fn ensure_terminal_shown() -> Result<()> { - ensure_console(); - 
// // Check if we're already relaunched to prevent infinite recursion - // if std::env::var("ANKI_LAUNCHER_IN_TERMINAL").is_ok() { - // println!("Recurse: Preparing to start Anki...\n"); - // return Ok(()); - // } - - // if have_console { - // } else { - // relaunch_in_cmd()?; - // } - Ok(()) -} - -fn ensure_console() { unsafe { - if !wincon::GetConsoleWindow().is_null() { - return; + if !GetConsoleWindow().is_invalid() { + // We already have a console, no need to spawn anki-console.exe + return Ok(()); } - - if consoleapi::AllocConsole() == 0 { - let error_code = errhandlingapi::GetLastError(); - eprintln!("unexpected AllocConsole error: {}", error_code); - return; - } - - // This black magic triggers Windows to switch to the new - // ANSI-supporting console host, which is usually only available - // when the app is built with the console subsystem. - let _ = Command::new("cmd").args(&["/C", ""]).status(); } + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() { + // Successfully attached to parent console + reconnect_stdio_to_console(); + return Ok(()); + } + + // No console available, spawn anki-console.exe and exit + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let console_exe = exe_dir.join("anki-console.exe"); + + if !console_exe.exists() { + anyhow::bail!("anki-console.exe not found in the same directory"); + } + + // Spawn anki-console.exe without waiting + Command::new(&console_exe) + .env("ANKI_IMPLICIT_CONSOLE", "1") + .spawn() + .context("Failed to spawn anki-console.exe")?; + + // Exit immediately after spawning + std::process::exit(0); } -fn attach_to_parent_console() -> bool { +pub fn attach_to_parent_console() -> bool { unsafe { - if !wincon::GetConsoleWindow().is_null() { + if !GetConsoleWindow().is_invalid() { // we have a console already - println!("attach: already had 
console, false"); return false; } - if wincon::AttachConsole(wincon::ATTACH_PARENT_PROCESS) != 0 { + if AttachConsole(ATTACH_PARENT_PROCESS).is_ok() { // successfully attached to parent - println!("attach: true"); + reconnect_stdio_to_console(); true } else { - println!("attach: false"); false } } } -/// If parent process has a console (eg cmd.exe), redirect our output there. -/// Sets config.show_console to true if successfully attached to console. -pub fn initial_terminal_setup(config: &mut Config) { +/// Reconnect stdin/stdout/stderr to the console. +fn reconnect_stdio_to_console() { use std::ffi::CString; use libc_stdhandle::*; - if !attach_to_parent_console() { - return; - } - // we launched without a console, so we'll need to open stdin/out/err let conin = CString::new("CONIN$").unwrap(); let conout = CString::new("CONOUT$").unwrap(); @@ -89,79 +91,127 @@ pub fn initial_terminal_setup(config: &mut Config) { libc::freopen(conout.as_ptr(), w.as_ptr(), stdout()); libc::freopen(conout.as_ptr(), w.as_ptr(), stderr()); } - - config.show_console = true; } -pub fn get_anki_binary_path(uv_install_root: &std::path::Path) -> std::path::PathBuf { - uv_install_root.join(".venv/Scripts/anki.exe") +pub fn finalize_uninstall() { + let uninstaller_path = get_uninstaller_path(); + + match uninstaller_path { + Some(path) => { + println!("Launching Windows uninstaller..."); + let result = Command::new(&path).env("ANKI_LAUNCHER", "1").spawn(); + + match result { + Ok(_) => { + println!("Uninstaller launched successfully."); + return; + } + Err(e) => { + println!("Failed to launch uninstaller: {e}"); + println!("You can manually run: {}", path.display()); + } + } + } + None => { + println!("Windows uninstaller not found."); + println!("You may need to uninstall via Windows Settings > Apps."); + } + } + println!("Press enter to close..."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); } -fn build_python_command( - anki_bin: &std::path::Path, - args: 
&[String], - config: &Config, -) -> Result { - let venv_dir = anki_bin - .parent() - .context("Failed to get venv Scripts directory")? - .parent() - .context("Failed to get venv directory")?; +fn get_uninstaller_path() -> Option { + // Try to read install directory from registry + if let Some(install_dir) = read_registry_install_dir() { + let uninstaller = install_dir.join("uninstall.exe"); + if uninstaller.exists() { + return Some(uninstaller); + } + } - // Use python.exe if show_console is true, otherwise pythonw.exe - let python_exe = if config.show_console { - venv_dir.join("Scripts/python.exe") - } else { - venv_dir.join("Scripts/pythonw.exe") - }; + // Fall back to default location + let default_dir = dirs::data_local_dir()?.join("Programs").join("Anki"); + let uninstaller = default_dir.join("uninstall.exe"); + if uninstaller.exists() { + return Some(uninstaller); + } - let mut cmd = Command::new(python_exe); - cmd.args(["-c", "import aqt; aqt.run()"]); - cmd.args(args); - - Ok(cmd) + None } -pub fn launch_anki_detached(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - use std::os::windows::process::CommandExt; - use std::process::Stdio; +fn read_registry_install_dir() -> Option { + unsafe { + let mut hkey = HKEY::default(); - const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; - const DETACHED_PROCESS: u32 = 0x00000008; + // Convert the registry path to wide string + let subkey = u16cstr!("SOFTWARE\\Anki"); - let mut cmd = build_python_command(anki_bin, &[], config)?; - cmd.stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS) - .ensure_spawn()?; - Ok(()) + // Open the registry key + let result = RegOpenKeyExW( + HKEY_CURRENT_USER, + PCWSTR(subkey.as_ptr()), + Some(0), + KEY_READ, + &mut hkey, + ); + + if result.is_err() { + return None; + } + + // Query the Install_Dir64 value + let value_name = u16cstr!("Install_Dir64"); + + let mut value_type = REG_SZ; + let mut 
data_size = 0u32; + + // First call to get the size + let result = RegQueryValueExW( + hkey, + PCWSTR(value_name.as_ptr()), + None, + Some(&mut value_type), + None, + Some(&mut data_size), + ); + + if result.is_err() || data_size == 0 { + let _ = RegCloseKey(hkey); + return None; + } + + // Allocate buffer and read the value + let mut buffer: Vec<u16> = vec![0; (data_size / 2) as usize]; + let result = RegQueryValueExW( + hkey, + PCWSTR(value_name.as_ptr()), + None, + Some(&mut value_type), + Some(buffer.as_mut_ptr() as *mut u8), + Some(&mut data_size), + ); + + let _ = RegCloseKey(hkey); + + if result.is_ok() { + // Convert wide string back to PathBuf + let len = buffer.iter().position(|&x| x == 0).unwrap_or(buffer.len()); + let path_str = String::from_utf16_lossy(&buffer[..len]); + Some(std::path::PathBuf::from(path_str)) + } else { + None + } + } } -pub fn handle_first_launch(_anki_bin: &std::path::Path) -> Result<()> { - Ok(()) -} - -pub fn exec_anki(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - let args: Vec<String> = std::env::args().skip(1).collect(); - let mut cmd = build_python_command(anki_bin, &args, config)?; - cmd.ensure_success()?; - Ok(()) -} - -pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? 
- .to_owned(); - - // On Windows, resources dir is the same as exe_dir - let resources_dir = exe_dir.clone(); - - Ok((exe_dir, resources_dir)) -} - -pub fn get_uv_binary_name() -> &'static str { - "uv.exe" +pub fn prepare_to_launch_normally() { + // Set the App User Model ID for Windows taskbar grouping + unsafe { + let _ = + SetCurrentProcessExplicitAppUserModelID(PCWSTR(u16cstr!("Ankitects.Anki").as_ptr())); + } + + attach_to_parent_console(); } diff --git a/qt/launcher/win/anki-console.bat b/qt/launcher/win/anki-console.bat deleted file mode 100644 index a565fa7b6..000000000 --- a/qt/launcher/win/anki-console.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -"%~dp0"\anki %* -pause - - diff --git a/qt/launcher/win/anki.template.nsi b/qt/launcher/win/anki.template.nsi index 7b2bfd8fc..84dedf9c8 100644 --- a/qt/launcher/win/anki.template.nsi +++ b/qt/launcher/win/anki.template.nsi @@ -250,8 +250,18 @@ FunctionEnd ; Uninstaller function un.onInit - MessageBox MB_OKCANCEL "This will remove Anki's program files, but will not delete your card data. If you wish to delete your card data as well, you can do so via File>Switch Profile inside Anki first. Are you sure you wish to uninstall Anki?" 
/SD IDOK IDOK next - Quit + ; Check for ANKI_LAUNCHER environment variable + ReadEnvStr $R0 "ANKI_LAUNCHER" + ${If} $R0 != "" + ; Wait for launcher to exit + Sleep 2000 + Goto next + ${Else} + ; Try to launch anki.exe with ANKI_LAUNCHER_UNINSTALL=1 + IfFileExists "$INSTDIR\anki.exe" 0 next + nsExec::Exec 'cmd /c "set ANKI_LAUNCHER_UNINSTALL=1 && start /b "" "$INSTDIR\anki.exe""' + Quit + ${EndIf} next: functionEnd diff --git a/qt/launcher/win/build.bat b/qt/launcher/win/build.bat index b21831462..da574f210 100644 --- a/qt/launcher/win/build.bat +++ b/qt/launcher/win/build.bat @@ -1,5 +1,10 @@ @echo off -set CODESIGN=1 -REM set NO_COMPRESS=1 +if "%NOCOMP%"=="1" ( + set NO_COMPRESS=1 + set CODESIGN=0 +) else ( + set CODESIGN=1 + set NO_COMPRESS=0 +) cargo run --bin build_win diff --git a/qt/pyproject.toml b/qt/pyproject.toml index e6537c76c..5a237edb0 100644 --- a/qt/pyproject.toml +++ b/qt/pyproject.toml @@ -11,17 +11,9 @@ dependencies = [ "requests", "send2trash", "waitress>=2.0.0", - "psutil; sys.platform == 'win32'", "pywin32; sys.platform == 'win32'", "anki-mac-helper; sys.platform == 'darwin'", "pip-system-certs!=5.1", - "mock", - "types-decorator", - "types-flask", - "types-flask-cors", - "types-markdown", - "types-waitress", - "types-pywin32", "pyqt6>=6.2", "pyqt6-webengine>=6.2", # anki dependency is added dynamically in hatch_build.py with exact version @@ -80,6 +72,9 @@ build-backend = "hatchling.build" [project.scripts] anki = "aqt:run" +[project.gui-scripts] +ankiw = "aqt:run" + [tool.hatch.build.targets.wheel] packages = ["aqt"] exclude = ["**/*.scss", "**/*.ui"] diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml index d3c7e215f..a1d24cc87 100644 --- a/rslib/Cargo.toml +++ b/rslib/Cargo.toml @@ -48,6 +48,7 @@ async-trait.workspace = true axum.workspace = true axum-client-ip.workspace = true axum-extra.workspace = true +bitflags.workspace = true blake3.workspace = true bytes.workspace = true chrono.workspace = true diff --git a/rslib/i18n/check.rs 
b/rslib/i18n/check.rs index f168406d1..48521bbdd 100644 --- a/rslib/i18n/check.rs +++ b/rslib/i18n/check.rs @@ -21,14 +21,11 @@ pub fn check(lang_map: &TranslationsByLang) { fn check_content(lang: &str, fname: &str, content: &str) { let lang_id: LanguageIdentifier = "en-US".parse().unwrap(); let resource = FluentResource::try_new(content.into()).unwrap_or_else(|e| { - panic!("{}\nUnable to parse {}/{}: {:?}", content, lang, fname, e); + panic!("{content}\nUnable to parse {lang}/{fname}: {e:?}"); }); let mut bundle: FluentBundle = FluentBundle::new(vec![lang_id]); bundle.add_resource(resource).unwrap_or_else(|e| { - panic!( - "{}\nUnable to bundle - duplicate key? {}/{}: {:?}", - content, lang, fname, e - ); + panic!("{content}\nUnable to bundle - duplicate key? {lang}/{fname}: {e:?}"); }); } diff --git a/rslib/i18n/gather.rs b/rslib/i18n/gather.rs index a25e5813e..835e108af 100644 --- a/rslib/i18n/gather.rs +++ b/rslib/i18n/gather.rs @@ -48,8 +48,7 @@ fn add_folder(map: &mut TranslationsByLang, folder: &Path, lang: &str) { let text = fs::read_to_string(entry.path()).unwrap(); assert!( text.ends_with('\n'), - "file was missing final newline: {:?}", - entry + "file was missing final newline: {entry:?}" ); map_entry.entry(module).or_default().push_str(&text); println!("cargo:rerun-if-changed={}", entry.path().to_str().unwrap()); diff --git a/rslib/i18n/src/lib.rs b/rslib/i18n/src/lib.rs index 1d79198bf..bfd6f5ba2 100644 --- a/rslib/i18n/src/lib.rs +++ b/rslib/i18n/src/lib.rs @@ -130,7 +130,7 @@ fn get_bundle( ) -> Option> { let res = FluentResource::try_new(text.into()) .map_err(|e| { - println!("Unable to parse translations file: {:?}", e); + println!("Unable to parse translations file: {e:?}"); }) .ok()?; @@ -138,14 +138,14 @@ fn get_bundle( bundle .add_resource(res) .map_err(|e| { - println!("Duplicate key detected in translation file: {:?}", e); + println!("Duplicate key detected in translation file: {e:?}"); }) .ok()?; if !extra_text.is_empty() { match 
FluentResource::try_new(extra_text) { Ok(res) => bundle.add_resource_overriding(res), - Err((_res, e)) => println!("Unable to parse translations file: {:?}", e), + Err((_res, e)) => println!("Unable to parse translations file: {e:?}"), } } @@ -291,7 +291,7 @@ impl I18n { let mut errs = vec![]; let out = bundle.format_pattern(pat, args.as_ref(), &mut errs); if !errs.is_empty() { - println!("Error(s) in translation '{}': {:?}", key, errs); + println!("Error(s) in translation '{key}': {errs:?}"); } // clone so we can discard args return out.to_string().into(); diff --git a/rslib/i18n/typescript.rs b/rslib/i18n/typescript.rs index 0c2230338..ce30048e2 100644 --- a/rslib/i18n/typescript.rs +++ b/rslib/i18n/typescript.rs @@ -81,7 +81,7 @@ fn get_args(variables: &[Variable]) -> String { .iter() .map(|v| format!("\"{}\": args.{}", v.name, typescript_arg_name(&v.name))) .join(", "); - format!("{{{}}}", out) + format!("{{{out}}}") } } diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs index f9df5716f..36af62eeb 100644 --- a/rslib/i18n/write_strings.rs +++ b/rslib/i18n/write_strings.rs @@ -69,12 +69,6 @@ impl I18n { {var_build} self.translate("{key}"{out_args}) }}"#, - func = func, - key = key, - doc = doc, - in_args = in_args, - out_args = out_args, - var_build = var_build, ) .unwrap(); } @@ -103,9 +97,6 @@ fn build_vars(translation: &Translation) -> String { writeln!( buf, r#" args.set("{fluent_name}", {rust_name}{trailer});"#, - fluent_name = fluent_name, - rust_name = rust_name, - trailer = trailer, ) .unwrap(); } @@ -204,13 +195,7 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! 
{{", .unwrap(); for (module, contents) in modules { - writeln!( - buf, - r###" "{module}" => r##"{contents}"##,"###, - module = module, - contents = contents - ) - .unwrap(); + writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap(); } buf.push_str("};\n"); diff --git a/rslib/proto/python.rs b/rslib/proto/python.rs index 0ca2c15ea..a5adb4179 100644 --- a/rslib/proto/python.rs +++ b/rslib/proto/python.rs @@ -183,9 +183,9 @@ fn python_type(field: &FieldDescriptor, output: bool) -> String { }; if field.is_list() { if output { - format!("Sequence[{}]", kind) + format!("Sequence[{kind}]") } else { - format!("Iterable[{}]", kind) + format!("Iterable[{kind}]") } } else if field.is_map() { let map_kind = field.kind(); @@ -213,7 +213,6 @@ fn write_header(out: &mut impl Write) -> Result<()> { out.write_all( br#"# Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html -# pylint: skip-file from __future__ import annotations diff --git a/rslib/rust_interface.rs b/rslib/rust_interface.rs index a75100b5b..6861df7dc 100644 --- a/rslib/rust_interface.rs +++ b/rslib/rust_interface.rs @@ -263,7 +263,7 @@ impl MethodHelpers for Method { fn get_input_arg_with_label(&self) -> String { self.input_type() .as_ref() - .map(|t| format!("input: {}", t)) + .map(|t| format!("input: {t}")) .unwrap_or_default() } diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index 349f2d9af..b6e81ce2a 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -39,6 +39,7 @@ impl From for BoolKey { BoolKeyProto::RenderLatex => BoolKey::RenderLatex, BoolKeyProto::LoadBalancerEnabled => BoolKey::LoadBalancerEnabled, BoolKeyProto::FsrsShortTermWithStepsEnabled => BoolKey::FsrsShortTermWithStepsEnabled, + BoolKeyProto::FsrsLegacyEvaluate => BoolKey::FsrsLegacyEvaluate, } } } diff --git a/rslib/src/browser_table.rs b/rslib/src/browser_table.rs index 90b3ac50c..c297f2bac 100644 --- 
a/rslib/src/browser_table.rs +++ b/rslib/src/browser_table.rs @@ -517,7 +517,7 @@ impl RowContext { return "".into(); }; if self.cards[0].is_undue_queue() { - format!("({})", due) + format!("({due})") } else { due.into() } @@ -625,7 +625,7 @@ impl RowContext { if self.notes_mode { let decks = self.cards.iter().map(|c| c.deck_id).unique().count(); if decks > 1 { - return format!("({})", decks); + return format!("({decks})"); } } let deck_name = self.deck.human_name(); diff --git a/rslib/src/card/mod.rs b/rslib/src/card/mod.rs index 82203ace6..b6b9ce807 100644 --- a/rslib/src/card/mod.rs +++ b/rslib/src/card/mod.rs @@ -187,12 +187,16 @@ impl Card { self.usn = usn; } - /// Caller must ensure provided deck exists and is not filtered. - fn set_deck(&mut self, deck: DeckId) { - self.remove_from_filtered_deck_restoring_queue(); + pub fn clear_fsrs_data(&mut self) { self.memory_state = None; self.desired_retention = None; self.decay = None; + } + + /// Caller must ensure provided deck exists and is not filtered. 
+ fn set_deck(&mut self, deck: DeckId) { + self.remove_from_filtered_deck_restoring_queue(); + self.clear_fsrs_data(); self.deck_id = deck; } diff --git a/rslib/src/card_rendering/writer.rs b/rslib/src/card_rendering/writer.rs index 892cb9087..22fb1fb34 100644 --- a/rslib/src/card_rendering/writer.rs +++ b/rslib/src/card_rendering/writer.rs @@ -52,7 +52,7 @@ trait Write { } fn write_sound(&mut self, buf: &mut String, resource: &str) { - write!(buf, "[sound:{}]", resource).unwrap(); + write!(buf, "[sound:{resource}]").unwrap(); } fn write_directive(&mut self, buf: &mut String, directive: &Directive) { @@ -94,9 +94,9 @@ trait Write { fn write_directive_option(&mut self, buf: &mut String, key: &str, val: &str) { if val.contains([']', ' ', '\t', '\r', '\n']) { - write!(buf, " {}=\"{}\"", key, val).unwrap(); + write!(buf, " {key}=\"{val}\"").unwrap(); } else { - write!(buf, " {}={}", key, val).unwrap(); + write!(buf, " {key}={val}").unwrap(); } } @@ -158,7 +158,7 @@ impl Write for AvExtractor<'_> { fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) { if let Some(error) = directive.error(self.tr) { - write!(buf, "[{}]", error).unwrap(); + write!(buf, "[{error}]").unwrap(); return; } @@ -173,7 +173,7 @@ impl Write for AvExtractor<'_> { other_args: directive .options .iter() - .map(|(key, val)| format!("{}={}", key, val)) + .map(|(key, val)| format!("{key}={val}")) .collect(), }, )), @@ -204,7 +204,7 @@ impl AvPrettifier { impl Write for AvPrettifier { fn write_sound(&mut self, buf: &mut String, resource: &str) { - write!(buf, "🔉{}🔉", resource).unwrap(); + write!(buf, "🔉{resource}🔉").unwrap(); } fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) { diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index 208a2f4ed..02919dc12 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -25,6 +25,9 @@ use crate::latex::contains_latex; use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; 
+static CLOZE: LazyLock = + LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap()); + static MATHJAX: LazyLock = LazyLock::new(|| { Regex::new( r"(?xsi) @@ -453,6 +456,10 @@ pub fn cloze_number_in_fields(fields: impl IntoIterator>) -> Ha set } +pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> { + CLOZE.replace_all(text, "$1") +} + fn strip_html_inside_mathjax(text: &str) -> Cow { MATHJAX.replace_all(text, |caps: &Captures| -> String { format!( @@ -610,6 +617,16 @@ mod test { ); } + #[test] + fn strip_clozes_regex() { + assert_eq!( + strip_clozes( + r#"The {{c1::moon::🌛}} {{c2::orbits::this hint has "::" in it}} the {{c3::🌏}}."# + ), + "The moon orbits the 🌏." + ); + } + #[test] fn mathjax_html() { // escaped angle brackets should be preserved diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index 39273b931..c76787cb0 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -41,6 +41,7 @@ pub enum BoolKey { WithDeckConfigs, Fsrs, FsrsHealthCheck, + FsrsLegacyEvaluate, LoadBalancerEnabled, FsrsShortTermWithStepsEnabled, #[strum(to_string = "normalize_note_text")] diff --git a/rslib/src/config/deck.rs b/rslib/src/config/deck.rs index d684534c0..a88ca61a7 100644 --- a/rslib/src/config/deck.rs +++ b/rslib/src/config/deck.rs @@ -41,5 +41,5 @@ impl Collection { } fn build_aux_deck_key(deck: DeckId, key: &str) -> String { - format!("_deck_{deck}_{key}", deck = deck, key = key) + format!("_deck_{deck}_{key}") } diff --git a/rslib/src/config/notetype.rs b/rslib/src/config/notetype.rs index 0d3fd9611..f9b70292d 100644 --- a/rslib/src/config/notetype.rs +++ b/rslib/src/config/notetype.rs @@ -32,7 +32,7 @@ impl Collection { }; Ok(get_aux_notetype_config_key( ntid, - &format!("{}_{}", key, ordinal), + &format!("{key}_{ordinal}"), )) } } @@ -70,5 +70,5 @@ impl Collection { } pub fn get_aux_notetype_config_key(ntid: NotetypeId, key: &str) -> String { - format!("_nt_{ntid}_{key}", ntid = ntid, key = key) + 
format!("_nt_{ntid}_{key}") } diff --git a/rslib/src/dbcheck.rs b/rslib/src/dbcheck.rs index ae960ab5c..f58a2184a 100644 --- a/rslib/src/dbcheck.rs +++ b/rslib/src/dbcheck.rs @@ -387,10 +387,10 @@ impl Collection { let mut basic = all_stock_notetypes(&self.tr).remove(0); let mut field = 3; while basic.fields.len() < field_count { - basic.add_field(format!("{}", field)); + basic.add_field(format!("{field}")); field += 1; } - basic.name = format!("db-check-{}-{}", stamp, field_count); + basic.name = format!("db-check-{stamp}-{field_count}"); let qfmt = basic.templates[0].config.q_format.clone(); let afmt = basic.templates[0].config.a_format.clone(); for n in 0..extra_cards_required { diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs index 128e43770..9eb3b595f 100644 --- a/rslib/src/deckconfig/update.rs +++ b/rslib/src/deckconfig/update.rs @@ -74,6 +74,7 @@ impl Collection { apply_all_parent_limits: self.get_config_bool(BoolKey::ApplyAllParentLimits), fsrs: self.get_config_bool(BoolKey::Fsrs), fsrs_health_check: self.get_config_bool(BoolKey::FsrsHealthCheck), + fsrs_legacy_evaluate: self.get_config_bool(BoolKey::FsrsLegacyEvaluate), days_since_last_fsrs_optimize, }) } diff --git a/rslib/src/decks/addupdate.rs b/rslib/src/decks/addupdate.rs index be4cb34cd..eb9e242a5 100644 --- a/rslib/src/decks/addupdate.rs +++ b/rslib/src/decks/addupdate.rs @@ -93,7 +93,7 @@ impl Collection { pub(crate) fn recover_missing_deck(&mut self, did: DeckId, usn: Usn) -> Result<()> { let mut deck = Deck::new_normal(); deck.id = did; - deck.name = NativeDeckName::from_native_str(format!("recovered{}", did)); + deck.name = NativeDeckName::from_native_str(format!("recovered{did}")); deck.set_modified(usn); self.add_or_update_single_deck_with_existing_id(&mut deck, usn) } diff --git a/rslib/src/decks/remove.rs b/rslib/src/decks/remove.rs index befb770f8..a3bc78209 100644 --- a/rslib/src/decks/remove.rs +++ b/rslib/src/decks/remove.rs @@ -28,7 +28,7 @@ impl Collection { 
let card_count = match deck.kind { DeckKind::Normal(_) => self.delete_all_cards_in_normal_deck(deck.id)?, DeckKind::Filtered(_) => { - self.return_all_cards_in_filtered_deck(deck.id)?; + self.return_all_cards_in_filtered_deck(deck)?; 0 } }; diff --git a/rslib/src/error/db.rs b/rslib/src/error/db.rs index b44f771dc..a36cd1a60 100644 --- a/rslib/src/error/db.rs +++ b/rslib/src/error/db.rs @@ -67,7 +67,7 @@ impl From for AnkiError { } AnkiError::DbError { source: DbError { - info: format!("{:?}", err), + info: format!("{err:?}"), kind: DbErrorKind::Other, }, } @@ -88,7 +88,7 @@ impl From for AnkiError { } AnkiError::DbError { source: DbError { - info: format!("{:?}", err), + info: format!("{err:?}"), kind: DbErrorKind::Other, }, } @@ -101,7 +101,7 @@ impl DbError { DbErrorKind::Corrupt => self.info.clone(), // fixme: i18n DbErrorKind::Locked => "Anki already open, or media currently syncing.".into(), - _ => format!("{:?}", self), + _ => format!("{self:?}"), } } } diff --git a/rslib/src/error/invalid_input.rs b/rslib/src/error/invalid_input.rs index f62174578..970e1c692 100644 --- a/rslib/src/error/invalid_input.rs +++ b/rslib/src/error/invalid_input.rs @@ -26,7 +26,7 @@ impl InvalidInputError { pub fn context(&self) -> String { if let Some(source) = &self.source { - format!("{}", source) + format!("{source}") } else { String::new() } diff --git a/rslib/src/error/mod.rs b/rslib/src/error/mod.rs index 0da89e0ff..d2bd11cf0 100644 --- a/rslib/src/error/mod.rs +++ b/rslib/src/error/mod.rs @@ -149,13 +149,13 @@ impl AnkiError { } CardTypeErrorDetails::MissingCloze => tr.card_templates_missing_cloze(), }; - format!("{}
{}", header, details) + format!("{header}
{details}") } AnkiError::DbError { source } => source.message(tr), AnkiError::SearchError { source } => source.message(tr), AnkiError::ParseNumError => tr.errors_parse_number_fail().into(), AnkiError::FilteredDeckError { source } => source.message(tr), - AnkiError::InvalidRegex { info: source } => format!("

{}
", source), + AnkiError::InvalidRegex { info: source } => format!("
{source}
"), AnkiError::MultipleNotetypesSelected => tr.errors_multiple_notetypes_selected().into(), AnkiError::DatabaseCheckRequired => tr.errors_please_check_database().into(), AnkiError::MediaCheckRequired => tr.errors_please_check_media().into(), @@ -172,7 +172,7 @@ impl AnkiError { | AnkiError::InvalidServiceIndex | AnkiError::InvalidMethodIndex | AnkiError::UndoEmpty - | AnkiError::InvalidCertificateFormat => format!("{:?}", self), + | AnkiError::InvalidCertificateFormat => format!("{self:?}"), AnkiError::FileIoError { source } => source.message(), AnkiError::InvalidInput { source } => source.message(), AnkiError::NotFound { source } => source.message(tr), diff --git a/rslib/src/error/network.rs b/rslib/src/error/network.rs index 469978cff..eb293c359 100644 --- a/rslib/src/error/network.rs +++ b/rslib/src/error/network.rs @@ -68,7 +68,7 @@ impl AnkiError { impl From<&reqwest::Error> for AnkiError { fn from(err: &reqwest::Error) -> Self { let url = err.url().map(|url| url.as_str()).unwrap_or(""); - let str_err = format!("{}", err); + let str_err = format!("{err}"); // strip url from error to avoid exposing keys let info = str_err.replace(url, ""); @@ -205,7 +205,7 @@ impl NetworkError { NetworkErrorKind::Other => tr.network_other(), }; let details = tr.network_details(self.info.as_str()); - format!("{}\n\n{}", summary, details) + format!("{summary}\n\n{details}") } } @@ -226,7 +226,7 @@ impl From for AnkiError { } .into() } else { - AnkiError::sync_error(format!("{:?}", err), SyncErrorKind::Other) + AnkiError::sync_error(format!("{err:?}"), SyncErrorKind::Other) } } } diff --git a/rslib/src/image_occlusion/imagedata.rs b/rslib/src/image_occlusion/imagedata.rs index 9319d85c6..fdf8ea4fd 100644 --- a/rslib/src/image_occlusion/imagedata.rs +++ b/rslib/src/image_occlusion/imagedata.rs @@ -77,7 +77,7 @@ impl Collection { ) -> Result { let value = match self.get_image_occlusion_note_inner(note_id) { Ok(note) => Value::Note(note), - Err(err) => Value::Error(format!("{:?}", 
err)), + Err(err) => Value::Error(format!("{err:?}")), }; Ok(GetImageOcclusionNoteResponse { value: Some(value) }) } diff --git a/rslib/src/image_occlusion/imageocclusion.rs b/rslib/src/image_occlusion/imageocclusion.rs index e2eea9a39..1de86bf87 100644 --- a/rslib/src/image_occlusion/imageocclusion.rs +++ b/rslib/src/image_occlusion/imageocclusion.rs @@ -98,7 +98,7 @@ pub fn get_image_cloze_data(text: &str) -> String { let Some((x, y)) = point_pair.split_once(',') else { continue; }; - write!(&mut point_str, "{},{} ", x, y).unwrap(); + write!(&mut point_str, "{x},{y} ").unwrap(); } // remove the trailing space point_str.pop(); diff --git a/rslib/src/import_export/text/import.rs b/rslib/src/import_export/text/import.rs index f28c27ca3..4425bb386 100644 --- a/rslib/src/import_export/text/import.rs +++ b/rslib/src/import_export/text/import.rs @@ -274,6 +274,9 @@ impl<'a> Context<'a> { deck.name = NativeDeckName::from_human_name(name); self.col.add_deck_inner(&mut deck, self.usn)?; self.deck_ids.insert(deck.id, deck.human_name()); + if name.is_empty() { + self.deck_ids.default = Some(deck.id); + } Some(deck.id) } else { None diff --git a/rslib/src/latex.rs b/rslib/src/latex.rs index 3ebeebf8a..e5cb002ac 100644 --- a/rslib/src/latex.rs +++ b/rslib/src/latex.rs @@ -100,7 +100,7 @@ fn fname_for_latex(latex: &str, svg: bool) -> String { let ext = if svg { "svg" } else { "png" }; let csum = hex::encode(sha1_of_data(latex.as_bytes())); - format!("latex-{}.{}", csum, ext) + format!("latex-{csum}.{ext}") } fn image_link_for_fname(src: &str, fname: &str) -> String { @@ -122,11 +122,7 @@ mod test { assert_eq!( extract_latex("a[latex]one
and
two[/latex]b", false), ( - format!( - "a\"one
and
two\"b", - fname - ) - .into(), + format!("a\"one
and
two\"b").into(), vec![ExtractedLatex { fname: fname.into(), latex: "one\nand\ntwo".into() diff --git a/rslib/src/log.rs b/rslib/src/log.rs index 4fb4dcfaf..fedc597c4 100644 --- a/rslib/src/log.rs +++ b/rslib/src/log.rs @@ -69,8 +69,8 @@ fn maybe_rotate_log(path: &str) -> io::Result<()> { return Ok(()); } - let path2 = format!("{}.1", path); - let path3 = format!("{}.2", path); + let path2 = format!("{path}.1"); + let path3 = format!("{path}.2"); // if a rotated file already exists, rename it if let Err(e) = fs::rename(&path2, path3) { diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs index 9fd3bc85f..6974e2f81 100644 --- a/rslib/src/media/files.rs +++ b/rslib/src/media/files.rs @@ -218,7 +218,7 @@ fn truncate_filename(fname: &str, max_bytes: usize) -> Cow { let mut new_name = if ext.is_empty() { stem.to_string() } else { - format!("{}.{}", stem, ext) + format!("{stem}.{ext}") }; // make sure we don't break Windows by ending with a space or dot diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs index 2b53321b9..932022e99 100644 --- a/rslib/src/notes/mod.rs +++ b/rslib/src/notes/mod.rs @@ -270,7 +270,7 @@ impl Note { self.fields .last_mut() .unwrap() - .push_str(&format!("; {}", last)); + .push_str(&format!("; {last}")); } } } diff --git a/rslib/src/notetype/schema11.rs b/rslib/src/notetype/schema11.rs index 272456ab7..8d713cbe7 100644 --- a/rslib/src/notetype/schema11.rs +++ b/rslib/src/notetype/schema11.rs @@ -126,7 +126,7 @@ fn other_to_bytes(other: &HashMap) -> Vec { } else { serde_json::to_vec(other).unwrap_or_else(|e| { // theoretically should never happen - println!("serialization failed for {:?}: {}", other, e); + println!("serialization failed for {other:?}: {e}"); vec![] }) } @@ -140,7 +140,7 @@ pub(crate) fn parse_other_fields( Default::default() } else { let mut map: HashMap = serde_json::from_slice(bytes).unwrap_or_else(|e| { - println!("deserialization failed for other: {}", e); + println!("deserialization failed for other: 
{e}"); Default::default() }); map.retain(|k, _v| !reserved.contains(k)); diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs index f17f6b949..9b5df66d5 100644 --- a/rslib/src/notetype/stock.rs +++ b/rslib/src/notetype/stock.rs @@ -179,8 +179,8 @@ pub(crate) fn cloze(tr: &I18n) -> Notetype { let back_extra = tr.notetypes_back_extra_field(); config = nt.add_field(back_extra.as_ref()); config.tag = Some(ClozeField::BackExtra as u32); - let qfmt = format!("{{{{cloze:{}}}}}", text); - let afmt = format!("{}
\n{{{{{}}}}}", qfmt, back_extra); + let qfmt = format!("{{{{cloze:{text}}}}}"); + let afmt = format!("{qfmt}
\n{{{{{back_extra}}}}}"); nt.add_template(nt.name.clone(), qfmt, afmt); nt } diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index 64d3649ea..eab89b783 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -33,6 +33,7 @@ use crate::deckconfig::LeechAction; use crate::decks::Deck; use crate::prelude::*; use crate::scheduler::fsrs::memory_state::fsrs_item_for_memory_state; +use crate::scheduler::fsrs::memory_state::get_decay_from_params; use crate::scheduler::states::PreviewState; use crate::search::SearchNode; @@ -439,7 +440,9 @@ impl Collection { let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?; let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs); let fsrs_next_states = if fsrs_enabled { - let fsrs = FSRS::new(Some(config.fsrs_params()))?; + let params = config.fsrs_params(); + let fsrs = FSRS::new(Some(params))?; + card.decay = Some(get_decay_from_params(params)); if card.memory_state.is_none() && card.ctype != CardType::New { // Card has been moved or imported into an FSRS deck after params were set, // and will need its initial memory state to be calculated based on review @@ -895,22 +898,20 @@ pub(crate) mod test { ) -> Result<()> { // Change due time to fake card answer_time, // works since answer_time is calculated as due - last_ivl - let update_due_string = format!("update cards set due={}", shift_due_time); + let update_due_string = format!("update cards set due={shift_due_time}"); col.storage.db.execute_batch(&update_due_string)?; col.clear_study_queues(); let current_card_state = current_state(col, post_answer.card_id); let state = match current_card_state { CardState::Normal(NormalState::Learning(state)) => state, - _ => panic!("State is not Normal: {:?}", current_card_state), + _ => panic!("State is not Normal: {current_card_state:?}"), }; let elapsed_secs = state.elapsed_secs as i32; // Give a 1 second leeway when the test runs on the off 
chance // that the test runs as a second rolls over. assert!( (elapsed_secs - expected_elapsed_secs).abs() <= 1, - "elapsed_secs: {} != expected_elapsed_secs: {}", - elapsed_secs, - expected_elapsed_secs + "elapsed_secs: {elapsed_secs} != expected_elapsed_secs: {expected_elapsed_secs}" ); Ok(()) diff --git a/rslib/src/scheduler/filtered/mod.rs b/rslib/src/scheduler/filtered/mod.rs index f1f3cc07d..331e54e5d 100644 --- a/rslib/src/scheduler/filtered/mod.rs +++ b/rslib/src/scheduler/filtered/mod.rs @@ -64,7 +64,8 @@ impl Collection { pub fn empty_filtered_deck(&mut self, did: DeckId) -> Result> { self.transact(Op::EmptyFilteredDeck, |col| { - col.return_all_cards_in_filtered_deck(did) + let deck = col.get_deck(did)?.or_not_found(did)?; + col.return_all_cards_in_filtered_deck(&deck) }) } @@ -78,8 +79,11 @@ impl Collection { } impl Collection { - pub(crate) fn return_all_cards_in_filtered_deck(&mut self, did: DeckId) -> Result<()> { - let cids = self.storage.all_cards_in_single_deck(did)?; + pub(crate) fn return_all_cards_in_filtered_deck(&mut self, deck: &Deck) -> Result<()> { + if !deck.is_filtered() { + return Err(FilteredDeckError::FilteredDeckRequired.into()); + } + let cids = self.storage.all_cards_in_single_deck(deck.id)?; self.return_cards_to_home_deck(&cids) } @@ -195,7 +199,7 @@ impl Collection { timing, }; - self.return_all_cards_in_filtered_deck(deck.id)?; + self.return_all_cards_in_filtered_deck(deck)?; self.build_filtered_deck(ctx) } @@ -214,14 +218,14 @@ impl Collection { .search_terms .get_mut(0) .unwrap(); - term1.search = format!("{} is:due", search); + term1.search = format!("{search} is:due"); let term2 = deck .filtered_mut() .unwrap() .search_terms .get_mut(1) .unwrap(); - term2.search = format!("{} is:new", search); + term2.search = format!("{search} is:new"); } } diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs index d01cde767..425d8da69 100644 --- a/rslib/src/scheduler/fsrs/memory_state.rs +++ 
b/rslib/src/scheduler/fsrs/memory_state.rs @@ -32,7 +32,7 @@ pub struct ComputeMemoryProgress { /// Helper function to determine the appropriate decay value based on FSRS /// parameters -fn get_decay_from_params(params: &[f32]) -> f32 { +pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 { if params.is_empty() { FSRS6_DEFAULT_DECAY // default decay for FSRS-6 } else if params.len() < 21 { @@ -188,9 +188,7 @@ impl Collection { } } else { // clear FSRS data if FSRS is disabled - card.memory_state = None; - card.desired_retention = None; - card.decay = None; + card.clear_fsrs_data(); } self.update_card_inner(&mut card, original, usn)?; } diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs index 76bc206be..63bdebe79 100644 --- a/rslib/src/scheduler/fsrs/params.rs +++ b/rslib/src/scheduler/fsrs/params.rs @@ -299,6 +299,33 @@ impl Collection { .is_ok() })?) } + + pub fn evaluate_params_legacy( + &mut self, + params: &Params, + search: &str, + ignore_revlogs_before: TimestampMillis, + ) -> Result { + let timing = self.timing_today()?; + let mut anki_progress = self.new_progress_handler::(); + let guard = self.search_cards_into_table(search, SortMode::NoOrder)?; + let revlogs: Vec = guard + .col + .storage + .get_revlog_entries_for_searched_cards_in_card_order()?; + let (items, review_count) = + fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before); + anki_progress.state.reviews = review_count as u32; + let fsrs = FSRS::new(Some(params))?; + Ok(fsrs.evaluate(items, |ip| { + anki_progress + .update(false, |p| { + p.total_iterations = ip.total as u32; + p.current_iteration = ip.current as u32; + }) + .is_ok() + })?) 
+ } } #[derive(Default, Clone, Copy, Debug)] diff --git a/rslib/src/scheduler/service/mod.rs b/rslib/src/scheduler/service/mod.rs index 993fd1dbe..43d694e4f 100644 --- a/rslib/src/scheduler/service/mod.rs +++ b/rslib/src/scheduler/service/mod.rs @@ -307,6 +307,21 @@ impl crate::services::SchedulerService for Collection { }) } + fn evaluate_params_legacy( + &mut self, + input: scheduler::EvaluateParamsLegacyRequest, + ) -> Result { + let ret = self.evaluate_params_legacy( + &input.params, + &input.search, + input.ignore_revlogs_before_ms.into(), + )?; + Ok(scheduler::EvaluateParamsResponse { + log_loss: ret.log_loss, + rmse_bins: ret.rmse_bins, + }) + } + fn get_optimal_retention_parameters( &mut self, input: scheduler::GetOptimalRetentionParametersRequest, diff --git a/rslib/src/scheduler/timespan.rs b/rslib/src/scheduler/timespan.rs index c779d33bc..b015e3e1e 100644 --- a/rslib/src/scheduler/timespan.rs +++ b/rslib/src/scheduler/timespan.rs @@ -25,7 +25,7 @@ pub fn answer_button_time_collapsible(seconds: u32, collapse_secs: u32, tr: &I18 if seconds == 0 { tr.scheduling_end().into() } else if seconds < collapse_secs { - format!("<{}", string) + format!("<{string}") } else { string } diff --git a/rslib/src/search/builder.rs b/rslib/src/search/builder.rs index 452f4d832..a76af0560 100644 --- a/rslib/src/search/builder.rs +++ b/rslib/src/search/builder.rs @@ -219,7 +219,7 @@ impl From for SearchNode { impl From for SearchNode { fn from(n: NoteId) -> Self { - SearchNode::NoteIds(format!("{}", n)) + SearchNode::NoteIds(format!("{n}")) } } diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs index 63096dad8..ff21bf4ca 100644 --- a/rslib/src/search/mod.rs +++ b/rslib/src/search/mod.rs @@ -240,7 +240,7 @@ impl Collection { } else { self.storage.setup_searched_cards_table()?; } - let sql = format!("insert into search_cids {}", sql); + let sql = format!("insert into search_cids {sql}"); let cards = self .storage @@ -307,7 +307,7 @@ impl Collection { let (sql, args) 
= writer.build_query(&top_node, mode.required_table())?; self.storage.setup_searched_notes_table()?; - let sql = format!("insert into search_nids {}", sql); + let sql = format!("insert into search_nids {sql}"); let notes = self .storage diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index 041ec4948..ae166ef54 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -94,6 +94,7 @@ pub enum SearchNode { WholeCollection, Regex(String), NoCombining(String), + StripClozes(String), WordBoundary(String), CustomData(String), Preset(String), @@ -277,7 +278,7 @@ fn unquoted_term(s: &str) -> IResult { Err(parse_failure( s, FailKind::UnknownEscape { - provided: format!("\\{}", c), + provided: format!("\\{c}"), }, )) } else if "\"() \u{3000}".contains(s.chars().next().unwrap()) { @@ -358,6 +359,7 @@ fn search_node_for_text_with_argument<'a>( "cid" => SearchNode::CardIds(check_id_list(val, key)?.into()), "re" => SearchNode::Regex(unescape_quotes(val)), "nc" => SearchNode::NoCombining(unescape(val)?), + "sc" => SearchNode::StripClozes(unescape(val)?), "w" => SearchNode::WordBoundary(unescape(val)?), "dupe" => parse_dupe(val)?, "has-cd" => SearchNode::CustomData(unescape(val)?), @@ -637,7 +639,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> { s, // id lists are undocumented, so no translation FailKind::Other { - info: Some(format!("expected only digits and commas in {}:", context)), + info: Some(format!("expected only digits and commas in {context}:")), }, )) } @@ -1110,19 +1112,19 @@ mod test { for term in &["added", "edited", "rated", "resched"] { assert!(matches!( - failkind(&format!("{}:1.1", term)), + failkind(&format!("{term}:1.1")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("{}:-1", term)), + failkind(&format!("{term}:-1")), SearchErrorKind::InvalidPositiveWholeNumber { .. 
} )); assert!(matches!( - failkind(&format!("{}:", term)), + failkind(&format!("{term}:")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("{}:foo", term)), + failkind(&format!("{term}:foo")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); } @@ -1223,19 +1225,19 @@ mod test { for term in &["ivl", "reps", "lapses", "pos"] { assert!(matches!( - failkind(&format!("prop:{}>", term)), + failkind(&format!("prop:{term}>")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{}=0.5", term)), + failkind(&format!("prop:{term}=0.5")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{}!=-1", term)), + failkind(&format!("prop:{term}!=-1")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{} { self.write_unqualified( text, self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + false, )? } SearchNode::SingleField { field, text, is_re } => { @@ -143,7 +145,14 @@ impl SqlWriter<'_> { self.write_dupe(*notetype_id, &self.norm_note(text))? } SearchNode::Regex(re) => self.write_regex(&self.norm_note(re), false)?, - SearchNode::NoCombining(text) => self.write_unqualified(&self.norm_note(text), true)?, + SearchNode::NoCombining(text) => { + self.write_unqualified(&self.norm_note(text), true, false)? 
+ } + SearchNode::StripClozes(text) => self.write_unqualified( + &self.norm_note(text), + self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + true, + )?, SearchNode::WordBoundary(text) => self.write_word_boundary(&self.norm_note(text))?, // other @@ -158,13 +167,12 @@ impl SqlWriter<'_> { }, SearchNode::Deck(deck) => self.write_deck(&norm(deck))?, SearchNode::NotetypeId(ntid) => { - write!(self.sql, "n.mid = {}", ntid).unwrap(); + write!(self.sql, "n.mid = {ntid}").unwrap(); } SearchNode::DeckIdsWithoutChildren(dids) => { write!( self.sql, - "c.did in ({}) or (c.odid != 0 and c.odid in ({}))", - dids, dids + "c.did in ({dids}) or (c.odid != 0 and c.odid in ({dids}))" ) .unwrap(); } @@ -175,13 +183,13 @@ impl SqlWriter<'_> { SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re), SearchNode::State(state) => self.write_state(state)?, SearchNode::Flag(flag) => { - write!(self.sql, "(c.flags & 7) == {}", flag).unwrap(); + write!(self.sql, "(c.flags & 7) == {flag}").unwrap(); } SearchNode::NoteIds(nids) => { write!(self.sql, "{} in ({})", self.note_id_column(), nids).unwrap(); } SearchNode::CardIds(cids) => { - write!(self.sql, "c.id in ({})", cids).unwrap(); + write!(self.sql, "c.id in ({cids})").unwrap(); } SearchNode::Property { operator, kind } => self.write_prop(operator, kind)?, SearchNode::CustomData(key) => self.write_custom_data(key)?, @@ -191,7 +199,12 @@ impl SqlWriter<'_> { Ok(()) } - fn write_unqualified(&mut self, text: &str, no_combining: bool) -> Result<()> { + fn write_unqualified( + &mut self, + text: &str, + no_combining: bool, + strip_clozes: bool, + ) -> Result<()> { let text = to_sql(text); let text = if no_combining { without_combining(&text) @@ -199,21 +212,41 @@ impl SqlWriter<'_> { text }; // implicitly wrap in % - let text = format!("%{}%", text); + let text = format!("%{text}%"); self.args.push(text); let arg_idx = self.args.len(); - let sfld_expr = if no_combining { - "coalesce(without_combining(cast(n.sfld as text)), 
n.sfld)" + let mut process_text_flags = ProcessTextFlags::empty(); + if no_combining { + process_text_flags.insert(ProcessTextFlags::NoCombining); + } + if strip_clozes { + process_text_flags.insert(ProcessTextFlags::StripClozes); + } + + let (sfld_expr, flds_expr) = if !process_text_flags.is_empty() { + let bits = process_text_flags.bits(); + ( + Cow::from(format!( + "coalesce(process_text(cast(n.sfld as text), {bits}), n.sfld)" + )), + Cow::from(format!("coalesce(process_text(n.flds, {bits}), n.flds)")), + ) } else { - "n.sfld" - }; - let flds_expr = if no_combining { - "coalesce(without_combining(n.flds), n.flds)" - } else { - "n.flds" + (Cow::from("n.sfld"), Cow::from("n.flds")) }; + if strip_clozes { + let cloze_notetypes_only_clause = self + .col + .get_all_notetypes()? + .iter() + .filter(|nt| nt.is_cloze()) + .map(|nt| format!("n.mid = {}", nt.id)) + .join(" or "); + write!(self.sql, "({cloze_notetypes_only_clause}) and ").unwrap(); + } + if let Some(field_indicies_by_notetype) = self.included_fields_by_notetype()? 
{ let field_idx_str = format!("' || ?{arg_idx} || '"); let other_idx_str = "%".to_string(); @@ -279,7 +312,7 @@ impl SqlWriter<'_> { text => { write!(self.sql, "n.tags regexp ?").unwrap(); let re = &to_custom_re(text, r"\S"); - self.args.push(format!("(?i).* {}(::| ).*", re)); + self.args.push(format!("(?i).* {re}(::| ).*")); } } } @@ -293,10 +326,10 @@ impl SqlWriter<'_> { write!(self.sql, "c.id in (select cid from revlog where id").unwrap(); match op { - ">" => write!(self.sql, " >= {}", target_cutoff_ms), - ">=" => write!(self.sql, " >= {}", day_before_cutoff_ms), - "<" => write!(self.sql, " < {}", day_before_cutoff_ms), - "<=" => write!(self.sql, " < {}", target_cutoff_ms), + ">" => write!(self.sql, " >= {target_cutoff_ms}"), + ">=" => write!(self.sql, " >= {day_before_cutoff_ms}"), + "<" => write!(self.sql, " < {day_before_cutoff_ms}"), + "<=" => write!(self.sql, " < {target_cutoff_ms}"), "=" => write!( self.sql, " between {} and {}", @@ -314,7 +347,7 @@ impl SqlWriter<'_> { .unwrap(); match ease { - RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {})", u), + RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {u})"), RatingKind::AnyAnswerButton => write!(self.sql, " and ease > 0)"), RatingKind::ManualReschedule => write!(self.sql, " and ease = 0)"), } @@ -356,9 +389,9 @@ impl SqlWriter<'_> { pos = pos ) .unwrap(), - PropertyKind::Interval(ivl) => write!(self.sql, "ivl {} {}", op, ivl).unwrap(), - PropertyKind::Reps(reps) => write!(self.sql, "reps {} {}", op, reps).unwrap(), - PropertyKind::Lapses(days) => write!(self.sql, "lapses {} {}", op, days).unwrap(), + PropertyKind::Interval(ivl) => write!(self.sql, "ivl {op} {ivl}").unwrap(), + PropertyKind::Reps(reps) => write!(self.sql, "reps {op} {reps}").unwrap(), + PropertyKind::Lapses(days) => write!(self.sql, "lapses {op} {days}").unwrap(), PropertyKind::Ease(ease) => { write!(self.sql, "factor {} {}", op, (ease * 1000.0) as u32).unwrap() } @@ -474,7 +507,7 @@ impl SqlWriter<'_> { }; // 
convert to a regex that includes child decks - self.args.push(format!("(?i)^{}($|\x1f)", native_deck)); + self.args.push(format!("(?i)^{native_deck}($|\x1f)")); let arg_idx = self.args.len(); self.sql.push_str(&format!(concat!( "(c.did in (select id from decks where name regexp ?{n})", @@ -491,7 +524,7 @@ impl SqlWriter<'_> { let ids = self.col.storage.deck_id_with_children(&parent)?; let mut buf = String::new(); ids_to_string(&mut buf, &ids); - write!(self.sql, "c.did in {}", buf,).unwrap(); + write!(self.sql, "c.did in {buf}",).unwrap(); } else { self.sql.push_str("false") } @@ -502,7 +535,7 @@ impl SqlWriter<'_> { fn write_template(&mut self, template: &TemplateKind) { match template { TemplateKind::Ordinal(n) => { - write!(self.sql, "c.ord = {}", n).unwrap(); + write!(self.sql, "c.ord = {n}").unwrap(); } TemplateKind::Name(name) => { if is_glob(name) { @@ -550,7 +583,7 @@ impl SqlWriter<'_> { } fn write_all_fields_regexp(&mut self, val: &str) { - self.args.push(format!("(?i){}", val)); + self.args.push(format!("(?i){val}")); write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap(); } @@ -566,7 +599,7 @@ impl SqlWriter<'_> { return Ok(()); } - self.args.push(format!("(?i){}", val)); + self.args.push(format!("(?i){val}")); let arg_idx = self.args.len(); let all_notetype_clauses = field_indicies_by_notetype @@ -775,13 +808,13 @@ impl SqlWriter<'_> { fn write_added(&mut self, days: u32) -> Result<()> { let cutoff = self.previous_day_cutoff(days)?.as_millis(); - write!(self.sql, "c.id > {}", cutoff).unwrap(); + write!(self.sql, "c.id > {cutoff}").unwrap(); Ok(()) } fn write_edited(&mut self, days: u32) -> Result<()> { let cutoff = self.previous_day_cutoff(days)?; - write!(self.sql, "n.mod > {}", cutoff).unwrap(); + write!(self.sql, "n.mod > {cutoff}").unwrap(); Ok(()) } @@ -804,16 +837,19 @@ impl SqlWriter<'_> { fn write_regex(&mut self, word: &str, no_combining: bool) -> Result<()> { let flds_expr = if no_combining { - 
"coalesce(without_combining(n.flds), n.flds)" + Cow::from(format!( + "coalesce(process_text(n.flds, {}), n.flds)", + ProcessTextFlags::NoCombining.bits() + )) } else { - "n.flds" + Cow::from("n.flds") }; let word = if no_combining { without_combining(word) } else { std::borrow::Cow::Borrowed(word) }; - self.args.push(format!(r"(?i){}", word)); + self.args.push(format!(r"(?i){word}")); let arg_idx = self.args.len(); if let Some(field_indices_by_notetype) = self.included_fields_for_unqualified_regex()? { let notetype_clause = |ctx: &UnqualifiedRegexSearchContext| -> String { @@ -996,6 +1032,7 @@ impl SearchNode { SearchNode::Duplicates { .. } => RequiredTable::Notes, SearchNode::Regex(_) => RequiredTable::Notes, SearchNode::NoCombining(_) => RequiredTable::Notes, + SearchNode::StripClozes(_) => RequiredTable::Notes, SearchNode::WordBoundary(_) => RequiredTable::Notes, SearchNode::NotetypeId(_) => RequiredTable::Notes, SearchNode::Notetype(_) => RequiredTable::Notes, @@ -1300,6 +1337,9 @@ c.odue != 0 then c.odue else c.due end) != {days}) or (c.queue in (1,4) and "((c.did in (1) or c.odid in (1)))" ); assert_eq!(&s(ctx, "preset:typo").0, "(false)"); + + // strip clozes + assert_eq!(&s(ctx, "sc:abcdef").0, "((n.mid = 1581236385343) and (coalesce(process_text(cast(n.sfld as text), 2), n.sfld) like ?1 escape '\\' or coalesce(process_text(n.flds, 2), n.flds) like ?1 escape '\\'))"); } #[test] diff --git a/rslib/src/search/writer.rs b/rslib/src/search/writer.rs index 600a18fd6..3bbe6fd0a 100644 --- a/rslib/src/search/writer.rs +++ b/rslib/src/search/writer.rs @@ -70,30 +70,31 @@ fn write_search_node(node: &SearchNode) -> String { match node { UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")), SingleField { field, text, is_re } => write_single_field(field, text, *is_re), - AddedInDays(u) => format!("added:{}", u), - EditedInDays(u) => format!("edited:{}", u), - IntroducedInDays(u) => format!("introduced:{}", u), + AddedInDays(u) => format!("added:{u}"), + 
EditedInDays(u) => format!("edited:{u}"), + IntroducedInDays(u) => format!("introduced:{u}"), CardTemplate(t) => write_template(t), - Deck(s) => maybe_quote(&format!("deck:{}", s)), - DeckIdsWithoutChildren(s) => format!("did:{}", s), + Deck(s) => maybe_quote(&format!("deck:{s}")), + DeckIdsWithoutChildren(s) => format!("did:{s}"), // not exposed on the GUI end DeckIdWithChildren(_) => "".to_string(), - NotetypeId(NotetypeIdType(i)) => format!("mid:{}", i), - Notetype(s) => maybe_quote(&format!("note:{}", s)), + NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"), + Notetype(s) => maybe_quote(&format!("note:{s}")), Rated { days, ease } => write_rated(days, ease), Tag { tag, is_re } => write_single_field("tag", tag, *is_re), Duplicates { notetype_id, text } => write_dupe(notetype_id, text), State(k) => write_state(k), - Flag(u) => format!("flag:{}", u), - NoteIds(s) => format!("nid:{}", s), - CardIds(s) => format!("cid:{}", s), + Flag(u) => format!("flag:{u}"), + NoteIds(s) => format!("nid:{s}"), + CardIds(s) => format!("cid:{s}"), Property { operator, kind } => write_property(operator, kind), WholeCollection => "deck:*".to_string(), - Regex(s) => maybe_quote(&format!("re:{}", s)), - NoCombining(s) => maybe_quote(&format!("nc:{}", s)), - WordBoundary(s) => maybe_quote(&format!("w:{}", s)), - CustomData(k) => maybe_quote(&format!("has-cd:{}", k)), - Preset(s) => maybe_quote(&format!("preset:{}", s)), + Regex(s) => maybe_quote(&format!("re:{s}")), + NoCombining(s) => maybe_quote(&format!("nc:{s}")), + StripClozes(s) => maybe_quote(&format!("sc:{s}")), + WordBoundary(s) => maybe_quote(&format!("w:{s}")), + CustomData(k) => maybe_quote(&format!("has-cd:{k}")), + Preset(s) => maybe_quote(&format!("preset:{s}")), } } @@ -128,23 +129,23 @@ fn write_single_field(field: &str, text: &str, is_re: bool) -> String { fn write_template(template: &TemplateKind) -> String { match template { TemplateKind::Ordinal(u) => format!("card:{}", u + 1), - TemplateKind::Name(s) => 
maybe_quote(&format!("card:{}", s)), + TemplateKind::Name(s) => maybe_quote(&format!("card:{s}")), } } fn write_rated(days: &u32, ease: &RatingKind) -> String { use RatingKind::*; match ease { - AnswerButton(n) => format!("rated:{}:{}", days, n), - AnyAnswerButton => format!("rated:{}", days), - ManualReschedule => format!("resched:{}", days), + AnswerButton(n) => format!("rated:{days}:{n}"), + AnyAnswerButton => format!("rated:{days}"), + ManualReschedule => format!("resched:{days}"), } } /// Escape double quotes and backslashes: \" fn write_dupe(notetype_id: &NotetypeId, text: &str) -> String { let esc = text.replace('\\', r"\\"); - maybe_quote(&format!("dupe:{},{}", notetype_id, esc)) + maybe_quote(&format!("dupe:{notetype_id},{esc}")) } fn write_state(kind: &StateKind) -> String { @@ -167,19 +168,19 @@ fn write_state(kind: &StateKind) -> String { fn write_property(operator: &str, kind: &PropertyKind) -> String { use PropertyKind::*; match kind { - Due(i) => format!("prop:due{}{}", operator, i), - Interval(u) => format!("prop:ivl{}{}", operator, u), - Reps(u) => format!("prop:reps{}{}", operator, u), - Lapses(u) => format!("prop:lapses{}{}", operator, u), - Ease(f) => format!("prop:ease{}{}", operator, f), - Position(u) => format!("prop:pos{}{}", operator, u), - Stability(u) => format!("prop:s{}{}", operator, u), - Difficulty(u) => format!("prop:d{}{}", operator, u), - Retrievability(u) => format!("prop:r{}{}", operator, u), + Due(i) => format!("prop:due{operator}{i}"), + Interval(u) => format!("prop:ivl{operator}{u}"), + Reps(u) => format!("prop:reps{operator}{u}"), + Lapses(u) => format!("prop:lapses{operator}{u}"), + Ease(f) => format!("prop:ease{operator}{f}"), + Position(u) => format!("prop:pos{operator}{u}"), + Stability(u) => format!("prop:s{operator}{u}"), + Difficulty(u) => format!("prop:d{operator}{u}"), + Retrievability(u) => format!("prop:r{operator}{u}"), Rated(u, ease) => match ease { - RatingKind::AnswerButton(val) => format!("prop:rated{}{}:{}", 
operator, u, val), - RatingKind::AnyAnswerButton => format!("prop:rated{}{}", operator, u), - RatingKind::ManualReschedule => format!("prop:resched{}{}", operator, u), + RatingKind::AnswerButton(val) => format!("prop:rated{operator}{u}:{val}"), + RatingKind::AnyAnswerButton => format!("prop:rated{operator}{u}"), + RatingKind::ManualReschedule => format!("prop:resched{operator}{u}"), }, CustomDataNumber { key, value } => format!("prop:cdn:{key}{operator}{value}"), CustomDataString { key, value } => { diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs index e7da70897..35a229e93 100644 --- a/rslib/src/storage/card/mod.rs +++ b/rslib/src/storage/card/mod.rs @@ -830,8 +830,7 @@ impl fmt::Display for ReviewOrderSubclause { ReviewOrderSubclause::RetrievabilitySm2 { today, order } => { temp_string = format!( // - (elapsed days+0.001)/(scheduled interval) - "-(1 + cast({today}-due+0.001 as real)/ivl) {order}", - today = today + "-(1 + cast({today}-due+0.001 as real)/ivl) {order}" ); &temp_string } @@ -845,7 +844,7 @@ impl fmt::Display for ReviewOrderSubclause { ReviewOrderSubclause::Added => "nid asc, ord asc", ReviewOrderSubclause::ReverseAdded => "nid desc, ord asc", }; - write!(f, "{}", clause) + write!(f, "{clause}") } } diff --git a/rslib/src/storage/deck/mod.rs b/rslib/src/storage/deck/mod.rs index 7b1e08d58..d47d03894 100644 --- a/rslib/src/storage/deck/mod.rs +++ b/rslib/src/storage/deck/mod.rs @@ -33,7 +33,7 @@ fn row_to_deck(row: &Row) -> Result { common, kind: kind.kind.ok_or_else(|| { AnkiError::db_error( - format!("invalid deck kind: {}", id), + format!("invalid deck kind: {id}"), DbErrorKind::MissingEntity, ) })?, @@ -347,8 +347,8 @@ impl SqliteStorage { ))?; let top = current.name.as_native_str(); - let prefix_start = &format!("{}\x1f", top); - let prefix_end = &format!("{}\x20", top); + let prefix_start = &format!("{top}\x1f"); + let prefix_end = &format!("{top}\x20"); self.db .prepare_cached(include_str!("update_active.sql"))? 
@@ -379,7 +379,7 @@ impl SqliteStorage { let decks = self .get_schema11_decks() .map_err(|e| AnkiError::JsonError { - info: format!("decoding decks: {}", e), + info: format!("decoding decks: {e}"), })?; let mut names = HashSet::new(); for (_id, deck) in decks { diff --git a/rslib/src/storage/deckconfig/mod.rs b/rslib/src/storage/deckconfig/mod.rs index 2103e1512..5cc39cfc8 100644 --- a/rslib/src/storage/deckconfig/mod.rs +++ b/rslib/src/storage/deckconfig/mod.rs @@ -197,7 +197,7 @@ impl SqliteStorage { serde_json::from_value(conf) }) .map_err(|e| AnkiError::JsonError { - info: format!("decoding deck config: {}", e), + info: format!("decoding deck config: {e}"), }) })?; for (id, mut conf) in conf.into_iter() { diff --git a/rslib/src/storage/mod.rs b/rslib/src/storage/mod.rs index f240555eb..015f4fdc7 100644 --- a/rslib/src/storage/mod.rs +++ b/rslib/src/storage/mod.rs @@ -19,6 +19,7 @@ mod upgrades; use std::fmt::Write; +pub(crate) use sqlite::ProcessTextFlags; pub(crate) use sqlite::SqliteStorage; #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -52,7 +53,7 @@ where { let mut trailing_sep = false; for id in ids { - write!(buf, "{},", id).unwrap(); + write!(buf, "{id},").unwrap(); trailing_sep = true; } if trailing_sep { diff --git a/rslib/src/storage/notetype/mod.rs b/rslib/src/storage/notetype/mod.rs index 88c4074ac..692b68887 100644 --- a/rslib/src/storage/notetype/mod.rs +++ b/rslib/src/storage/notetype/mod.rs @@ -345,7 +345,7 @@ impl SqliteStorage { let nts = self .get_schema11_notetypes() .map_err(|e| AnkiError::JsonError { - info: format!("decoding models: {:?}", e), + info: format!("decoding models: {e:?}"), })?; let mut names = HashSet::new(); for (mut ntid, nt) in nts { diff --git a/rslib/src/storage/sqlite.rs b/rslib/src/storage/sqlite.rs index 211cf44b1..e4b6f60f0 100644 --- a/rslib/src/storage/sqlite.rs +++ b/rslib/src/storage/sqlite.rs @@ -9,6 +9,7 @@ use std::hash::Hasher; use std::path::Path; use std::sync::Arc; +use bitflags::bitflags; use 
fnv::FnvHasher; use fsrs::FSRS; use fsrs::FSRS5_DEFAULT_DECAY; @@ -24,6 +25,7 @@ use super::upgrades::SCHEMA_MAX_VERSION; use super::upgrades::SCHEMA_MIN_VERSION; use super::upgrades::SCHEMA_STARTING_VERSION; use super::SchemaVersion; +use crate::cloze::strip_clozes; use crate::config::schema11::schema11_config_as_string; use crate::error::DbErrorKind; use crate::prelude::*; @@ -31,6 +33,7 @@ use crate::scheduler::timing::local_minutes_west_for_stamp; use crate::scheduler::timing::v1_creation_date; use crate::storage::card::data::CardData; use crate::text::without_combining; +use crate::text::CowMapping; fn unicase_compare(s1: &str, s2: &str) -> Ordering { UniCase::new(s1).cmp(&UniCase::new(s2)) @@ -74,7 +77,7 @@ fn open_or_create_collection_db(path: &Path) -> Result { add_regexp_function(&db)?; add_regexp_fields_function(&db)?; add_regexp_tags_function(&db)?; - add_without_combining_function(&db)?; + add_process_text_function(&db)?; add_fnvhash_function(&db)?; add_extract_original_position_function(&db)?; add_extract_custom_data_function(&db)?; @@ -111,17 +114,28 @@ fn add_field_index_function(db: &Connection) -> rusqlite::Result<()> { ) } -fn add_without_combining_function(db: &Connection) -> rusqlite::Result<()> { +bitflags! { + pub(crate) struct ProcessTextFlags: u8 { + const NoCombining = 1; + const StripClozes = 1 << 1; + } +} + +fn add_process_text_function(db: &Connection) -> rusqlite::Result<()> { db.create_scalar_function( - "without_combining", - 1, + "process_text", + 2, FunctionFlags::SQLITE_DETERMINISTIC, |ctx| { - let text = ctx.get_raw(0).as_str()?; - Ok(match without_combining(text) { - Cow::Borrowed(_) => None, - Cow::Owned(o) => Some(o), - }) + let mut text = Cow::from(ctx.get_raw(0).as_str()?); + let opt = ProcessTextFlags::from_bits_truncate(ctx.get_raw(1).as_i64()? 
as u8); + if opt.contains(ProcessTextFlags::StripClozes) { + text = text.map_cow(strip_clozes); + } + if opt.contains(ProcessTextFlags::NoCombining) { + text = text.map_cow(without_combining); + } + Ok(text.get_owned()) }, ) } @@ -601,7 +615,7 @@ impl SqliteStorage { }) { Ok(corrupt) => corrupt, Err(e) => { - println!("error: {:?}", e); + println!("error: {e:?}"); true } } diff --git a/rslib/src/storage/sync.rs b/rslib/src/storage/sync.rs index 4bd0e5242..256566d68 100644 --- a/rslib/src/storage/sync.rs +++ b/rslib/src/storage/sync.rs @@ -54,7 +54,7 @@ impl SqliteStorage { if let Some(new_usn) = server_usn_if_client { let mut stmt = self .db - .prepare_cached(&format!("update {} set usn=? where id=?", table))?; + .prepare_cached(&format!("update {table} set usn=? where id=?"))?; for id in ids { stmt.execute(params![new_usn, id])?; } diff --git a/rslib/src/storage/sync_check.rs b/rslib/src/storage/sync_check.rs index 50e92f7d0..7693a5921 100644 --- a/rslib/src/storage/sync_check.rs +++ b/rslib/src/storage/sync_check.rs @@ -11,7 +11,7 @@ impl SqliteStorage { fn table_has_usn(&self, table: &str) -> Result { Ok(self .db - .prepare(&format!("select null from {} where usn=-1", table))? + .prepare(&format!("select null from {table} where usn=-1"))? .query([])? .next()? .is_some()) @@ -19,7 +19,7 @@ impl SqliteStorage { fn table_count(&self, table: &str) -> Result { self.db - .query_row(&format!("select count() from {}", table), [], |r| r.get(0)) + .query_row(&format!("select count() from {table}"), [], |r| r.get(0)) .map_err(Into::into) } @@ -36,7 +36,7 @@ impl SqliteStorage { ] { if self.table_has_usn(table)? 
{ return Err(AnkiError::sync_error( - format!("table had usn=-1: {}", table), + format!("table had usn=-1: {table}"), SyncErrorKind::Other, )); } diff --git a/rslib/src/sync/collection/tests.rs b/rslib/src/sync/collection/tests.rs index abf82262f..a7aa6cc8d 100644 --- a/rslib/src/sync/collection/tests.rs +++ b/rslib/src/sync/collection/tests.rs @@ -100,7 +100,7 @@ where _lock = LOCK.lock().await; endpoint } else { - format!("http://{}/", addr) + format!("http://{addr}/") }; let endpoint = Url::try_from(endpoint.as_str()).unwrap(); let auth = SyncAuth { @@ -734,7 +734,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> { for table in &["cards", "notes", "decks"] { assert_eq!( col1.storage - .db_scalar::(&format!("select count() from {}", table))?, + .db_scalar::(&format!("select count() from {table}"))?, 2 ); } @@ -754,7 +754,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> { for table in &["cards", "notes", "decks"] { assert_eq!( col2.storage - .db_scalar::(&format!("select count() from {}", table))?, + .db_scalar::(&format!("select count() from {table}"))?, 1 ); } diff --git a/rslib/src/sync/media/database/client/mod.rs b/rslib/src/sync/media/database/client/mod.rs index 5fe493679..f9c6e5ed1 100644 --- a/rslib/src/sync/media/database/client/mod.rs +++ b/rslib/src/sync/media/database/client/mod.rs @@ -285,7 +285,7 @@ fn row_to_name_and_checksum(row: &Row) -> error::Result<(String, Sha1Hash)> { fn trace(event: rusqlite::trace::TraceEvent) { if let rusqlite::trace::TraceEvent::Stmt(_, sql) = event { - println!("sql: {}", sql); + println!("sql: {sql}"); } } diff --git a/rslib/src/tags/findreplace.rs b/rslib/src/tags/findreplace.rs index 5db6e3ed2..b60b5ed88 100644 --- a/rslib/src/tags/findreplace.rs +++ b/rslib/src/tags/findreplace.rs @@ -35,7 +35,7 @@ impl Collection { }; if !match_case { - search = format!("(?i){}", search).into(); + search = format!("(?i){search}").into(); } self.transact(Op::UpdateTag, |col| { diff --git 
a/rslib/src/tags/matcher.rs b/rslib/src/tags/matcher.rs index b4961015e..d3c6ad88b 100644 --- a/rslib/src/tags/matcher.rs +++ b/rslib/src/tags/matcher.rs @@ -33,7 +33,7 @@ impl TagMatcher { (?:^|\ ) # 1: the tag prefix ( - {} + {tags} ) (?: # 2: an optional child separator @@ -41,8 +41,7 @@ impl TagMatcher { # or a space/end of string the end of the string |\ |$ ) - "#, - tags + "# ))?; Ok(Self { @@ -61,7 +60,7 @@ impl TagMatcher { let out = self.regex.replace(tag, |caps: &Captures| { // if we captured the child separator, add it to the replacement if caps.get(2).is_some() { - Cow::Owned(format!("{}::", replacement)) + Cow::Owned(format!("{replacement}::")) } else { Cow::Borrowed(replacement) } @@ -92,7 +91,7 @@ impl TagMatcher { let replacement = replacer(caps.get(1).unwrap().as_str()); // if we captured the child separator, add it to the replacement if caps.get(2).is_some() { - format!("{}::", replacement) + format!("{replacement}::") } else { replacement } diff --git a/rslib/src/tags/reparent.rs b/rslib/src/tags/reparent.rs index cbab806ff..4976b760e 100644 --- a/rslib/src/tags/reparent.rs +++ b/rslib/src/tags/reparent.rs @@ -109,7 +109,7 @@ fn reparented_name(existing_name: &str, new_parent: Option<&str>) -> Option baz::bar - let new_name = format!("{}::{}", new_parent, existing_base); + let new_name = format!("{new_parent}::{existing_base}"); if new_name != existing_name { Some(new_name) } else { diff --git a/rslib/src/template.rs b/rslib/src/template.rs index e3a900a2b..4895cc162 100644 --- a/rslib/src/template.rs +++ b/rslib/src/template.rs @@ -265,10 +265,8 @@ fn template_error_to_anki_error( }; let details = htmlescape::encode_minimal(&localized_template_error(tr, err)); let more_info = tr.card_template_rendering_more_info(); - let source = format!( - "{}
{}
{}", - header, details, TEMPLATE_ERROR_LINK, more_info - ); + let source = + format!("{header}
{details}
{more_info}"); AnkiError::TemplateError { info: source } } @@ -279,32 +277,29 @@ fn localized_template_error(tr: &I18n, err: TemplateError) -> String { .card_template_rendering_no_closing_brackets("}}", tag) .into(), TemplateError::ConditionalNotClosed(tag) => tr - .card_template_rendering_conditional_not_closed(format!("{{{{/{}}}}}", tag)) + .card_template_rendering_conditional_not_closed(format!("{{{{/{tag}}}}}")) .into(), TemplateError::ConditionalNotOpen { closed, currently_open, } => if let Some(open) = currently_open { tr.card_template_rendering_wrong_conditional_closed( - format!("{{{{/{}}}}}", closed), - format!("{{{{/{}}}}}", open), + format!("{{{{/{closed}}}}}"), + format!("{{{{/{open}}}}}"), ) } else { tr.card_template_rendering_conditional_not_open( - format!("{{{{/{}}}}}", closed), - format!("{{{{#{}}}}}", closed), - format!("{{{{^{}}}}}", closed), + format!("{{{{/{closed}}}}}"), + format!("{{{{#{closed}}}}}"), + format!("{{{{^{closed}}}}}"), ) } .into(), TemplateError::FieldNotFound { field, filters } => tr - .card_template_rendering_no_such_field(format!("{{{{{}{}}}}}", filters, field), field) + .card_template_rendering_no_such_field(format!("{{{{{filters}{field}}}}}"), field) .into(), TemplateError::NoSuchConditional(condition) => tr - .card_template_rendering_no_such_field( - format!("{{{{{}}}}}", condition), - &condition[1..], - ) + .card_template_rendering_no_such_field(format!("{{{{{condition}}}}}"), &condition[1..]) .into(), } } @@ -523,10 +518,7 @@ impl RenderContext<'_> { Ok(false ^ negated) } else { let prefix = if negated { "^" } else { "#" }; - Err(TemplateError::NoSuchConditional(format!( - "{}{}", - prefix, key - ))) + Err(TemplateError::NoSuchConditional(format!("{prefix}{key}"))) } } } @@ -858,14 +850,14 @@ fn nodes_to_string(buf: &mut String, nodes: &[ParsedNode]) { .unwrap(); } ParsedNode::Conditional { key, children } => { - write!(buf, "{{{{#{}}}}}", key).unwrap(); + write!(buf, "{{{{#{key}}}}}").unwrap(); nodes_to_string(buf, 
children); - write!(buf, "{{{{/{}}}}}", key).unwrap(); + write!(buf, "{{{{/{key}}}}}").unwrap(); } ParsedNode::NegatedConditional { key, children } => { - write!(buf, "{{{{^{}}}}}", key).unwrap(); + write!(buf, "{{{{^{key}}}}}").unwrap(); nodes_to_string(buf, children); - write!(buf, "{{{{/{}}}}}", key).unwrap(); + write!(buf, "{{{{/{key}}}}}").unwrap(); } } } diff --git a/rslib/src/template_filters.rs b/rslib/src/template_filters.rs index cb3504fe3..4949e756d 100644 --- a/rslib/src/template_filters.rs +++ b/rslib/src/template_filters.rs @@ -165,15 +165,15 @@ fn furigana_filter(text: &str) -> Cow { /// convert to [[type:...]] for the gui code to process fn type_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:{}]]", field_name).into() + format!("[[type:{field_name}]]").into() } fn type_cloze_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:cloze:{}]]", field_name).into() + format!("[[type:cloze:{field_name}]]").into() } fn type_nc_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:nc:{}]]", field_name).into() + format!("[[type:nc:{field_name}]]").into() } fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> { @@ -191,18 +191,17 @@ fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> { r##" -{} - -"##, - id, field_name, id, text +{field_name} + +"## ) .into() } fn tts_filter(options: &str, text: &str) -> String { - format!("[anki:tts lang={}]{}[/anki:tts]", options, text) + format!("[anki:tts lang={options}]{text}[/anki:tts]") } // Tests diff --git a/rslib/src/text.rs b/rslib/src/text.rs index f83332ff8..590c05b39 100644 --- a/rslib/src/text.rs +++ b/rslib/src/text.rs @@ -484,7 +484,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> { match s { r"\\" | r"\*" => s.to_string(), r"\_" => "_".to_string(), - "*" => format!("{}*", wildcard), + "*" => format!("{wildcard}*"), "_" => wildcard.to_string(), s => regex::escape(s), } diff --git a/rslib/sync/Cargo.toml 
b/rslib/sync/Cargo.toml index 7a8f8534a..d23b4f380 100644 --- a/rslib/sync/Cargo.toml +++ b/rslib/sync/Cargo.toml @@ -13,4 +13,9 @@ path = "main.rs" name = "anki-sync-server" [dependencies] + +[target.'cfg(windows)'.dependencies] +anki = { workspace = true, features = ["native-tls"] } + +[target.'cfg(not(windows))'.dependencies] anki = { workspace = true, features = ["rustls"] } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index fa07f7fa5..8a21ec74e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] # older versions may fail to compile; newer versions may fail the clippy tests -channel = "1.87.0" +channel = "1.88.0" diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index 3a3c06f2c..37e213570 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -21,12 +21,7 @@ use walkdir::WalkDir; const NONSTANDARD_HEADER: &[&str] = &[ "./pylib/anki/_vendor/stringcase.py", - "./pylib/anki/importing/pauker.py", - "./pylib/anki/importing/supermemo_xml.py", "./pylib/anki/statsbg.py", - "./pylib/tools/protoc-gen-mypy.py", - "./python/pyqt/install.py", - "./python/write_wheel.py", "./qt/aqt/mpv.py", "./qt/aqt/winpaths.py", ]; @@ -113,7 +108,7 @@ impl LintContext { LazyCell::force(&self.unstaged_changes); fix_copyright(path)?; } else { - println!("missing standard copyright header: {:?}", path); + println!("missing standard copyright header: {path:?}"); self.found_problems = true; } } @@ -246,7 +241,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html .write(true) .open(path) .with_context(|| format!("opening {path}"))?; - write!(file, "{}{}", header, data).with_context(|| format!("writing {path}"))?; + write!(file, "{header}{data}").with_context(|| format!("writing {path}"))?; Ok(()) } diff --git a/tools/publish b/tools/publish index 4c625c36d..6214f27ed 100755 --- a/tools/publish +++ b/tools/publish @@ -1,9 +1,14 @@ #!/bin/bash set -e +shopt -s extglob #export 
UV_PUBLISH_TOKEN=$(pass show w/pypi-api-test) #out/extracted/uv/uv publish --index testpypi out/wheels/* export UV_PUBLISH_TOKEN=$(pass show w/pypi-api) -out/extracted/uv/uv publish out/wheels/* + +# Upload all wheels except anki_release*.whl first +out/extracted/uv/uv publish out/wheels/!(anki_release*).whl +# Then upload anki_release*.whl +out/extracted/uv/uv publish out/wheels/anki_release*.whl diff --git a/tools/reload_webviews.py b/tools/reload_webviews.py index 948401d9b..bb84c2554 100755 --- a/tools/reload_webviews.py +++ b/tools/reload_webviews.py @@ -43,11 +43,11 @@ except Exception as e: print_error( f"Could not establish connection to Chromium remote debugger. Is Anki Open? Exception:\n{e}" ) - exit(1) + sys.exit(1) if chrome.tabs is None: print_error("Was unable to get active web views.") - exit(1) + sys.exit(1) for tab_index, tab_data in enumerate(chrome.tabs): print(f"Reloading page: {tab_data['title']}") diff --git a/tools/update-launcher-env b/tools/update-launcher-env new file mode 100755 index 000000000..c84569f55 --- /dev/null +++ b/tools/update-launcher-env @@ -0,0 +1,15 @@ +#!/bin/bash +# +# Install our latest anki/aqt code into the launcher venv + +set -e + +rm -rf out/wheels +./ninja wheels +if [[ "$OSTYPE" == "darwin"* ]]; then + export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv +else + export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv +fi +./out/extracted/uv/uv pip install out/wheels/* + diff --git a/tools/update-launcher-env.bat b/tools/update-launcher-env.bat new file mode 100644 index 000000000..9b0b814c6 --- /dev/null +++ b/tools/update-launcher-env.bat @@ -0,0 +1,8 @@ +@echo off +rem +rem Install our latest anki/aqt code into the launcher venv + +rmdir /s /q out\wheels 2>nul +call tools\ninja wheels +set VIRTUAL_ENV=%LOCALAPPDATA%\AnkiProgramFiles\.venv +for %%f in (out\wheels\*.whl) do out\extracted\uv\uv pip install "%%f" \ No newline at end of file diff --git a/ts/editor/LabelName.svelte 
b/ts/editor/LabelName.svelte index 527acdbda..ea3c7e0b4 100644 --- a/ts/editor/LabelName.svelte +++ b/ts/editor/LabelName.svelte @@ -6,9 +6,3 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - - diff --git a/ts/lib/components/HelpSection.svelte b/ts/lib/components/HelpSection.svelte index 71b45fb6c..b52486865 100644 --- a/ts/lib/components/HelpSection.svelte +++ b/ts/lib/components/HelpSection.svelte @@ -8,6 +8,8 @@ import Row from "./Row.svelte"; import type { HelpItem } from "./types"; + import { mdiEarth } from "./icons"; + import Icon from "./Icon.svelte"; export let item: HelpItem; @@ -21,6 +23,11 @@ {/if} {#if item.help} + {#if item.global} +
+ +
+ {/if} {@html renderMarkdown(item.help)} {:else} {@html renderMarkdown( @@ -54,4 +61,12 @@ color: var(--fg-subtle); font-size: small; } + + .icon { + display: inline-block; + width: 1em; + fill: currentColor; + margin-right: 0.25em; + margin-bottom: 1.25em; + } diff --git a/ts/lib/components/types.ts b/ts/lib/components/types.ts index 9a2105d9e..2f94a2778 100644 --- a/ts/lib/components/types.ts +++ b/ts/lib/components/types.ts @@ -9,6 +9,7 @@ export type HelpItem = { help?: string; url?: string; sched?: HelpItemScheduler; + global?: boolean; }; export enum HelpItemScheduler { diff --git a/ts/lib/tag-editor/TagEditor.svelte b/ts/lib/tag-editor/TagEditor.svelte index cbbbf3f57..eb033ef7a 100644 --- a/ts/lib/tag-editor/TagEditor.svelte +++ b/ts/lib/tag-editor/TagEditor.svelte @@ -510,7 +510,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html border: 1px solid var(--border); border-radius: var(--border-radius); padding: 6px; - margin: 1px; + margin: 1px 3px 3px 1px; &:focus-within { outline-offset: -1px; diff --git a/ts/lib/tag-editor/TagInput.svelte b/ts/lib/tag-editor/TagInput.svelte index a8d76bcee..31d3b51f6 100644 --- a/ts/lib/tag-editor/TagInput.svelte +++ b/ts/lib/tag-editor/TagInput.svelte @@ -166,7 +166,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } function onKeydown(event: KeyboardEvent): void { - switch (event.code) { + switch (event.key) { case "Enter": onEnter(event); break; diff --git a/ts/lib/tslib/keys.ts b/ts/lib/tslib/keys.ts index 9bd6b42d8..58f571fac 100644 --- a/ts/lib/tslib/keys.ts +++ b/ts/lib/tslib/keys.ts @@ -90,7 +90,7 @@ export function keyToPlatformString(key: string): string { } export function isArrowLeft(event: KeyboardEvent): boolean { - if (event.code === "ArrowLeft") { + if (event.key === "ArrowLeft") { return true; } @@ -98,7 +98,7 @@ export function isArrowLeft(event: KeyboardEvent): boolean { } export function isArrowRight(event: KeyboardEvent): boolean { - if 
(event.code === "ArrowRight") { + if (event.key === "ArrowRight") { return true; } @@ -106,7 +106,7 @@ export function isArrowRight(event: KeyboardEvent): boolean { } export function isArrowUp(event: KeyboardEvent): boolean { - if (event.code === "ArrowUp") { + if (event.key === "ArrowUp") { return true; } @@ -114,7 +114,7 @@ export function isArrowUp(event: KeyboardEvent): boolean { } export function isArrowDown(event: KeyboardEvent): boolean { - if (event.code === "ArrowDown") { + if (event.key === "ArrowDown") { return true; } diff --git a/ts/licenses.json b/ts/licenses.json index 2e88336b3..412d1dae3 100644 --- a/ts/licenses.json +++ b/ts/licenses.json @@ -95,8 +95,8 @@ "repository": "https://github.com/TooTallNate/node-agent-base", "publisher": "Nathan Rajlich", "email": "nathan@tootallnate.net", - "path": "node_modules/http-proxy-agent/node_modules/agent-base", - "licenseFile": "node_modules/http-proxy-agent/node_modules/agent-base/README.md" + "path": "node_modules/https-proxy-agent/node_modules/agent-base", + "licenseFile": "node_modules/https-proxy-agent/node_modules/agent-base/README.md" }, "asynckit@0.4.0": { "licenses": "MIT", diff --git a/ts/routes/deck-options/AdvancedOptions.svelte b/ts/routes/deck-options/AdvancedOptions.svelte index 31c3f0d4c..fb892b7ec 100644 --- a/ts/routes/deck-options/AdvancedOptions.svelte +++ b/ts/routes/deck-options/AdvancedOptions.svelte @@ -82,6 +82,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: tr.deckConfigCustomScheduling(), help: tr.deckConfigCustomSchedulingTooltip(), url: "https://faqs.ankiweb.net/the-2021-scheduler.html#add-ons-and-custom-scheduling", + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/DailyLimits.svelte b/ts/routes/deck-options/DailyLimits.svelte index 9b156ca00..ea403c1f4 100644 --- a/ts/routes/deck-options/DailyLimits.svelte +++ b/ts/routes/deck-options/DailyLimits.svelte @@ -133,14 +133,15 
@@ }, newCardsIgnoreReviewLimit: { title: tr.deckConfigNewCardsIgnoreReviewLimit(), - help: newCardsIgnoreReviewLimitHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, applyAllParentLimits: { title: tr.deckConfigApplyAllParentLimits(), help: applyAllParentLimitsHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/FsrsOptions.svelte b/ts/routes/deck-options/FsrsOptions.svelte index f573a0278..706407889 100644 --- a/ts/routes/deck-options/FsrsOptions.svelte +++ b/ts/routes/deck-options/FsrsOptions.svelte @@ -10,7 +10,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import { SimulateFsrsReviewRequest } from "@generated/anki/scheduler_pb"; import { computeFsrsParams, - evaluateParams, + evaluateParamsLegacy, getRetentionWorkload, setWantsAbort, } from "@generated/backend"; @@ -31,6 +31,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html GetRetentionWorkloadRequest, UpdateDeckConfigsMode, } from "@generated/anki/deck_config_pb"; + import type Modal from "bootstrap/js/dist/modal"; export let state: DeckOptionsState; export let openHelpModal: (String) => void; @@ -243,10 +244,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html const search = $config.paramSearch ? 
$config.paramSearch : defaultparamSearch; - const resp = await evaluateParams({ + const resp = await evaluateParamsLegacy({ search, ignoreRevlogsBeforeMs: getIgnoreRevlogsBeforeMs(), - numOfRelearningSteps: $config.relearnSteps.length, + params: fsrsParams($config), }); if (computeParamsProgress) { computeParamsProgress.current = computeParamsProgress.total; @@ -296,7 +297,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html state.save(UpdateDeckConfigsMode.COMPUTE_ALL_PARAMS); } - let showSimulator = false; + let simulatorModal: Modal; - {#if false} - + {#if state.legacyEvaluate}
-
-
diff --git a/ts/routes/deck-options/FsrsOptionsOuter.svelte b/ts/routes/deck-options/FsrsOptionsOuter.svelte index 1f31e0bf9..fa543b5fc 100644 --- a/ts/routes/deck-options/FsrsOptionsOuter.svelte +++ b/ts/routes/deck-options/FsrsOptionsOuter.svelte @@ -35,10 +35,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: "FSRS", help: tr.deckConfigFsrsTooltip(), url: HelpPage.DeckOptions.fsrs, + global: true, }, desiredRetention: { title: tr.deckConfigDesiredRetention(), - help: tr.deckConfigDesiredRetentionTooltip(), + help: + tr.deckConfigDesiredRetentionTooltip() + + "\n\n" + + tr.deckConfigDesiredRetentionTooltip2(), sched: HelpItemScheduler.FSRS, }, modelParams: { @@ -53,6 +57,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: tr.deckConfigRescheduleCardsOnChange(), help: tr.deckConfigRescheduleCardsOnChangeTooltip(), sched: HelpItemScheduler.FSRS, + global: true, }, computeOptimalRetention: { title: tr.deckConfigComputeOptimalRetention(), @@ -62,10 +67,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html healthCheck: { title: tr.deckConfigHealthCheck(), help: + tr.deckConfigAffectsEntireCollection() + + "\n\n" + tr.deckConfigHealthCheckTooltip1() + "\n\n" + tr.deckConfigHealthCheckTooltip2(), sched: HelpItemScheduler.FSRS, + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/SimulatorModal.svelte b/ts/routes/deck-options/SimulatorModal.svelte index 64b712560..546f840d6 100644 --- a/ts/routes/deck-options/SimulatorModal.svelte +++ b/ts/routes/deck-options/SimulatorModal.svelte @@ -33,8 +33,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import EasyDaysInput from "./EasyDaysInput.svelte"; import Warning from "./Warning.svelte"; import type { ComputeRetentionProgress } from "@generated/anki/collection_pb"; + import Modal from "bootstrap/js/dist/modal"; - export let shown 
= false; export let state: DeckOptionsState; export let simulateFsrsRequest: SimulateFsrsReviewRequest; export let computing: boolean; @@ -234,9 +234,21 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } $: easyDayPercentages = [...$config.easyDaysPercentages]; + + export let modal: Modal | null = null; + + function setupModal(node: Element) { + modal = new Modal(node); + return { + destroy() { + modal?.dispose(); + modal = null; + }, + }; + } -