Compare commits


No commits in common. "main" and "25.06b7" have entirely different histories.

216 changed files with 7407 additions and 8915 deletions


@ -10,6 +10,3 @@ PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows
[term] [term]
color = "always" color = "always"
[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]


@ -1 +1 @@
25.09.2 25.06b7


@ -1,2 +1 @@
nodeLinker: node-modules nodeLinker: node-modules
enableScripts: false


@ -49,7 +49,6 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/> Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org> Andrew Gaul <andrew@gaul.org>
kenden kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com> Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/> neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt> Andreas Reis <github.com/nwwt>
@ -234,16 +233,6 @@ Spiritual Father <https://github.com/spiritualfather>
Emmanuel Ferdman <https://github.com/emmanuel-ferdman> Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008> Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com> Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>
******************** ********************
The text of the 3 clause BSD license follows: The text of the 3 clause BSD license follows:

Cargo.lock (generated): 1114 changed lines

File diff suppressed because it is too large


@ -33,8 +33,9 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca" rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
[workspace.dependencies.fsrs] [workspace.dependencies.fsrs]
version = "5.1.0" version = "4.1.1"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git" # git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs" # path = "../open-spaced-repetition/fsrs-rs"
[workspace.dependencies] [workspace.dependencies]
@ -51,7 +52,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes unicase = "=2.6.0" # any changes could invalidate sqlite indexes
# normal # normal
ammonia = "4.1.2" ammonia = "4.1.0"
anyhow = "1.0.98" anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] } async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6" async-stream = "0.3.6"
@ -59,7 +60,6 @@ async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] } axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3" axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] } axum-extra = { version = "0.10.1", features = ["typed-header"] }
bitflags = "2.9.1"
blake3 = "1.8.2" blake3 = "1.8.2"
bytes = "1.10.1" bytes = "1.10.1"
camino = "1.1.10" camino = "1.1.10"
@ -109,7 +109,6 @@ prost-types = "0.13"
pulldown-cmark = "0.13.0" pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] } pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1" rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1" regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] } reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] } rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@ -133,7 +132,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] } tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] } tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3" tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] } tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] } unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0" unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24" unicode-normalization = "0.1.24"
@ -141,7 +140,7 @@ walkdir = "2.5.0"
which = "8.0.0" which = "8.0.0"
widestring = "1.1.0" widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] } winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] } windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] }
wiremock = "0.6.3" wiremock = "0.6.3"
xz2 = "0.1.7" xz2 = "0.1.7"
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] } zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }


@ -1,4 +1,4 @@
# Anki® # Anki
[![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci) [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci)


@ -1,6 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use anyhow::Result; use anyhow::Result;
use ninja_gen::action::BuildAction; use ninja_gen::action::BuildAction;
use ninja_gen::archives::Platform; use ninja_gen::archives::Platform;
@ -123,14 +125,7 @@ impl BuildAction for BuildWheel {
} }
fn files(&mut self, build: &mut impl FilesHandle) { fn files(&mut self, build: &mut impl FilesHandle) {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { build.add_inputs("uv", inputs![":uv_binary"]);
let uv_path =
std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
build.add_inputs("uv", inputs![uv_path]);
} else {
build.add_inputs("uv", inputs![":uv_binary"]);
}
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
// Set the project directory based on which package we're building // Set the project directory based on which package we're building
@ -227,19 +222,15 @@ struct Sphinx {
impl BuildAction for Sphinx { impl BuildAction for Sphinx {
fn command(&self) -> &str { fn command(&self) -> &str {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { if env::var("OFFLINE_BUILD").is_err() {
"$python python/sphinx/build.py"
} else {
"$uv sync --extra sphinx && $python python/sphinx/build.py" "$uv sync --extra sphinx && $python python/sphinx/build.py"
} else {
"$python python/sphinx/build.py"
} }
} }
fn files(&mut self, build: &mut impl FilesHandle) { fn files(&mut self, build: &mut impl FilesHandle) {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { if env::var("OFFLINE_BUILD").is_err() {
let uv_path =
std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
build.add_inputs("uv", inputs![uv_path]);
} else {
build.add_inputs("uv", inputs![":uv_binary"]); build.add_inputs("uv", inputs![":uv_binary"]);
// Set environment variable to use the existing pyenv // Set environment variable to use the existing pyenv
build.add_variable("pyenv_path", "$builddir/pyenv"); build.add_variable("pyenv_path", "$builddir/pyenv");


@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
pub fn check_rust(build: &mut Build) -> Result<()> { pub fn check_rust(build: &mut Build) -> Result<()> {
let inputs = inputs![ let inputs = inputs![
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"), glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
"Cargo.lock", "Cargo.lock",
"Cargo.toml", "Cargo.toml",
"rust-toolchain.toml", "rust-toolchain.toml",


@ -35,7 +35,3 @@ path = "src/bin/update_uv.rs"
[[bin]] [[bin]]
name = "update_protoc" name = "update_protoc"
path = "src/bin/update_protoc.rs" path = "src/bin/update_protoc.rs"
[[bin]]
name = "update_node"
path = "src/bin/update_node.rs"


@ -49,46 +49,6 @@ pub trait BuildAction {
} }
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
std::any::type_name::<Self>() std::any::type_name::<Self>().split("::").last().unwrap()
.split("::")
.last()
.unwrap()
.split('<')
.next()
.unwrap()
} }
} }
#[cfg(test)]
trait TestBuildAction {}
#[cfg(test)]
impl<T: TestBuildAction + ?Sized> BuildAction for T {
fn command(&self) -> &str {
"test"
}
fn files(&mut self, _build: &mut impl FilesHandle) {}
}
#[allow(dead_code, unused_variables)]
#[test]
fn should_strip_regions_in_type_name() {
struct Bare;
impl TestBuildAction for Bare {}
assert_eq!(Bare {}.name(), "Bare");
struct WithLifeTime<'a>(&'a str);
impl TestBuildAction for WithLifeTime<'_> {}
assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
assert_eq!(
WithMultiLifeTime("test", "test").name(),
"WithMultiLifeTime"
);
struct WithGeneric<T>(T);
impl<T> TestBuildAction for WithGeneric<T> {}
assert_eq!(WithGeneric(3).name(), "WithGeneric");
}


@ -67,7 +67,7 @@ impl Platform {
} }
/// Append .exe to path if on Windows. /// Append .exe to path if on Windows.
pub fn with_exe(path: &str) -> Cow<'_, str> { pub fn with_exe(path: &str) -> Cow<str> {
if cfg!(windows) { if cfg!(windows) {
format!("{path}.exe").into() format!("{path}.exe").into()
} else { } else {


@ -1,268 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::error::Error;
use std::fs;
use std::path::Path;
use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
#[derive(Debug)]
struct NodeRelease {
version: String,
files: Vec<NodeFile>,
}
#[derive(Debug)]
struct NodeFile {
filename: String,
url: String,
}
fn main() -> Result<(), Box<dyn Error>> {
let release_info = fetch_node_release_info()?;
let new_text = generate_node_archive_function(&release_info)?;
update_node_text(&new_text)?;
println!("Node.js archive function updated successfully!");
Ok(())
}
fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
let client = Client::new();
// Get the Node.js release info
let response = client
.get("https://nodejs.org/dist/index.json")
.header("User-Agent", "anki-build-updater")
.send()?;
let releases: Vec<Value> = response.json()?;
// Find the latest LTS release
let latest = releases
.iter()
.find(|release| {
// LTS releases have a non-false "lts" field
release["lts"].as_str().is_some() && release["lts"] != false
})
.ok_or("No LTS releases found")?;
let version = latest["version"]
.as_str()
.ok_or("Version not found")?
.to_string();
let files = latest["files"]
.as_array()
.ok_or("Files array not found")?
.iter()
.map(|f| f.as_str().unwrap_or(""))
.collect::<Vec<_>>();
let lts_name = latest["lts"].as_str().unwrap_or("unknown");
println!("Found Node.js LTS version: {version} ({lts_name})");
// Map platforms to their expected file keys and full filenames
let platform_mapping = vec![
(
"linux-x64",
"linux-x64",
format!("node-{version}-linux-x64.tar.xz"),
),
(
"linux-arm64",
"linux-arm64",
format!("node-{version}-linux-arm64.tar.xz"),
),
(
"darwin-x64",
"osx-x64-tar",
format!("node-{version}-darwin-x64.tar.xz"),
),
(
"darwin-arm64",
"osx-arm64-tar",
format!("node-{version}-darwin-arm64.tar.xz"),
),
(
"win-x64",
"win-x64-zip",
format!("node-{version}-win-x64.zip"),
),
(
"win-arm64",
"win-arm64-zip",
format!("node-{version}-win-arm64.zip"),
),
];
let mut node_files = Vec::new();
for (platform, file_key, filename) in platform_mapping {
// Check if this file exists in the release
if files.contains(&file_key) {
let url = format!("https://nodejs.org/dist/{version}/{filename}");
node_files.push(NodeFile {
filename: filename.clone(),
url,
});
println!("Found file for {platform}: {filename} (key: {file_key})");
} else {
return Err(
format!("File not found for {platform} (key: {file_key}): {filename}").into(),
);
}
}
Ok(NodeRelease {
version,
files: node_files,
})
}
fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
let client = Client::new();
// Fetch the SHASUMS256.txt file once
println!("Fetching SHA256 checksums...");
let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
let shasums_response = client
.get(&shasums_url)
.header("User-Agent", "anki-build-updater")
.send()?;
let shasums_text = shasums_response.text()?;
// Create a mapping from filename patterns to platform names - using the exact
// patterns we stored in files
let platform_mapping = vec![
("linux-x64.tar.xz", "LinuxX64"),
("linux-arm64.tar.xz", "LinuxArm"),
("darwin-x64.tar.xz", "MacX64"),
("darwin-arm64.tar.xz", "MacArm"),
("win-x64.zip", "WindowsX64"),
("win-arm64.zip", "WindowsArm"),
];
let mut platform_blocks = Vec::new();
for (file_pattern, platform_name) in platform_mapping {
// Find the file that ends with this pattern
if let Some(file) = release
.files
.iter()
.find(|f| f.filename.ends_with(file_pattern))
{
// Find the SHA256 for this file
let sha256 = shasums_text
.lines()
.find(|line| line.contains(&file.filename))
.and_then(|line| line.split_whitespace().next())
.ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;
println!(
"Found SHA256 for {}: {} => {}",
platform_name, file.filename, sha256
);
let block = format!(
" Platform::{} => OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }},",
platform_name, file.url, sha256
);
platform_blocks.push(block);
} else {
return Err(format!(
"File not found for platform {platform_name}: no file ending with {file_pattern}"
)
.into());
}
}
let function = format!(
"pub fn node_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}",
platform_blocks.join("\n")
);
Ok(function)
}
fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
let node_rs_content = read_node_rs()?;
// Regex to match the entire node_archive function with proper multiline
// matching
let re = Regex::new(
r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
)?;
let updated_content = re.replace(&node_rs_content, new_function);
write_node_rs(&updated_content)?;
Ok(())
}
fn read_node_rs() -> Result<String, Box<dyn Error>> {
// Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
let manifest_dir =
std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
let path = Path::new(&manifest_dir).join("src").join("node.rs");
Ok(fs::read_to_string(path)?)
}
fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
// Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
let manifest_dir =
std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
let path = Path::new(&manifest_dir).join("src").join("node.rs");
fs::write(path, content)?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_regex_replacement() {
let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform {
Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
sha256: "old_hash",
},
Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
sha256: "old_hash",
},
}
}
More code here"#;
let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform {
Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
sha256: "new_hash",
},
Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
sha256: "new_hash",
},
}
}"#;
let re = Regex::new(
r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
).unwrap();
let result = re.replace(sample_content, new_function);
assert!(result.contains("v21.0.0"));
assert!(result.contains("new_hash"));
assert!(!result.contains("old_hash"));
assert!(result.contains("Some other code"));
assert!(result.contains("More code here"));
}
}


@ -19,28 +19,28 @@ use crate::input::BuildInput;
pub fn node_archive(platform: Platform) -> OnlineArchive { pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform { match platform {
Platform::LinuxX64 => OnlineArchive { Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12", sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
}, },
Platform::LinuxArm => OnlineArchive { Platform::LinuxArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18", sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
}, },
Platform::MacX64 => OnlineArchive { Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a", sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
}, },
Platform::MacArm => OnlineArchive { Platform::MacArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914", sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
}, },
Platform::WindowsX64 => OnlineArchive { Platform::WindowsX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85", sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
}, },
Platform::WindowsArm => OnlineArchive { Platform::WindowsArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip",
sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621", sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142",
}, },
} }
} }
@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
} }
} }
fn with_cmd_ext(bin: &str) -> Cow<'_, str> { fn with_cmd_ext(bin: &str) -> Cow<str> {
if cfg!(windows) { if cfg!(windows) {
format!("{bin}.cmd").into() format!("{bin}.cmd").into()
} else { } else {


@ -32,19 +32,10 @@ pub fn setup_pyenv(args: PyenvArgs) {
} }
} }
let mut command = Command::new(args.uv_bin);
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
command.env_remove(key);
}
}
run_command( run_command(
command Command::new(args.uv_bin)
.env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone()) .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
.args(["sync", "--locked", "--no-config"]) .args(["sync", "--locked"])
.args(args.extra_args), .args(args.extra_args),
); );


@ -28,11 +28,7 @@ pub fn setup_yarn(args: YarnArgs) {
.arg("--ignore-scripts"), .arg("--ignore-scripts"),
); );
} else { } else {
run_command( run_command(Command::new(&args.yarn_bin).arg("install"));
Command::new(&args.yarn_bin)
.arg("install")
.arg("--immutable"),
);
} }
std::fs::write(args.stamp, b"").unwrap(); std::fs::write(args.stamp, b"").unwrap();

File diff suppressed because it is too large


@ -1,78 +1,35 @@
# This is a user-contributed Dockerfile. No official support is available. # This Dockerfile uses three stages.
# 1. Compile anki (and dependencies) and build python wheels.
# 2. Create a virtual environment containing anki and its dependencies.
# 3. Create a final image that only includes anki's virtual environment and required
# system packages.
ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive" ARG DEBIAN_FRONTEND="noninteractive"
FROM ubuntu:24.04 AS build # Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
> /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel \
# Bazel expects /usr/bin/python
&& ln -s /usr/local/bin/python /usr/bin/python
WORKDIR /opt/anki WORKDIR /opt/anki
ENV PYTHON_VERSION="3.13" COPY . .
# Build python wheels.
# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
build-essential \
pkg-config \
libssl-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
libffi-dev \
zlib1g-dev \
liblzma-dev \
ca-certificates \
ninja-build \
rsync \
libglib2.0-0 \
libgl1 \
libx11-6 \
libxext6 \
libxrender1 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxcb1 \
libxcb-render0 \
libxcb-shm0 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-shape0 \
libxcb-xfixes0 \
libxcb-xinerama0 \
libxcb-xinput0 \
libsm6 \
libice6 \
&& rm -rf /var/lib/apt/lists/*
# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
&& ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"
RUN uv python install ${PYTHON_VERSION} --default
COPY . .
RUN ./tools/build RUN ./tools/build
# Install pre-compiled Anki. # Install pre-compiled Anki.
FROM python:3.13-slim AS installer FROM python:${PYTHON_VERSION}-slim as installer
WORKDIR /opt/anki/ WORKDIR /opt/anki/
COPY --from=build /opt/anki/out/wheels/ wheels/ COPY --from=build /opt/anki/wheels/ wheels/
# Use virtual environment. # Use virtual environment.
RUN python -m venv venv \ RUN python -m venv venv \
&& ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \ && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
&& ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl
# We use another build stage here so we don't include the wheels in the final image. # We use another build stage here so we don't include the wheels in the final image.
FROM python:3.13-slim AS final FROM python:${PYTHON_VERSION}-slim as final
COPY --from=installer /opt/anki/venv /opt/anki/venv COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies. # Install run-time dependencies.
@ -102,9 +59,9 @@ RUN apt-get update \
libxrender1 \ libxrender1 \
libxtst6 \ libxtst6 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Add non-root user. # Add non-root user.
RUN useradd --create-home anki RUN useradd --create-home anki
USER anki USER anki
WORKDIR /work WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"] ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"

@ -1 +1 @@
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba Subproject commit 4a65d6012ac022a35f5c80c80b2b665447b6a525


@ -5,11 +5,6 @@ database-check-card-properties =
[one] Fixed { $count } invalid card property. [one] Fixed { $count } invalid card property.
*[other] Fixed { $count } invalid card properties. *[other] Fixed { $count } invalid card properties.
} }
database-check-card-last-review-time-empty =
{ $count ->
[one] Added last review time to { $count } card.
*[other] Added last review time to { $count } cards.
}
database-check-missing-templates = database-check-missing-templates =
{ $count -> { $count ->
[one] Deleted { $count } card with missing template. [one] Deleted { $count } card with missing template.


@ -384,6 +384,8 @@ deck-config-which-deck = Which deck would you like to display options for?
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }... deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters. deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
deck-config-not-enough-history = Insufficient review history to perform this operation. deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews = deck-config-must-have-400-reviews =
{ $count -> { $count ->
[one] Only { $count } review was found. [one] Only { $count } review was found.
@ -392,6 +394,7 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards # Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-optimize-button = Optimize Current Preset deck-config-optimize-button = Optimize Current Preset
# Indicates that a given function or label, provided via the "text" variable, operates slowly. # Indicates that a given function or label, provided via the "text" variable, operates slowly.
deck-config-slow-suffix = { $text } (slow) deck-config-slow-suffix = { $text } (slow)
@ -404,6 +407,7 @@ deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history. deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended. deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params deck-config-get-params = Get Params
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete. deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }... deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change deck-config-reschedule-cards-on-change = Reschedule cards on change
@ -422,7 +426,7 @@ deck-config-desired-retention-tooltip =
values will greatly increase your workload, and lower values can be demoralizing when you forget values will greatly increase your workload, and lower values can be demoralizing when you forget
a lot of material. a lot of material.
deck-config-desired-retention-tooltip2 = deck-config-desired-retention-tooltip2 =
The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator. The workload values provided by the tooltip are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip = deck-config-historical-retention-tooltip =
When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
assume that when you did those old reviews, you remembered 90% of the material. If your old retention assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@ -464,7 +468,12 @@ deck-config-compute-optimal-weights-tooltip2 =
By default, parameters will be calculated from the review history of all decks using the current preset. You can By default, parameters will be calculated from the review history of all decks using the current preset. You can
optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
optimizing the parameters. optimizing the parameters.
deck-config-compute-optimal-retention-tooltip4 =
This tool will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you're
willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-please-save-your-changes-first = Please save your changes first. deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-workload-factor-change = Approximate workload: {$factor}x deck-config-workload-factor-change = Approximate workload: {$factor}x
(compared to {$previousDR}% desired retention) (compared to {$previousDR}% desired retention)
@ -496,10 +505,7 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation # Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of # diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date. # cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental) deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation deck-config-clear-last-simulate = Clear Last Simulation
@ -508,14 +514,10 @@ deck-config-advanced-settings = Advanced Settings
deck-config-smooth-graph = Smooth graph deck-config-smooth-graph = Smooth graph
deck-config-suspend-leeches = Suspend leeches deck-config-suspend-leeches = Suspend leeches
deck-config-save-options-to-preset = Save Changes to Preset deck-config-save-options-to-preset = Save Changes to Preset
deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
# Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting # Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
# to show the total number of cards that can be recalled or retrieved on a # to show the total number of cards that can be recalled or retrieved on a
# specific date. # specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card
## Messages related to the FSRS schedulers health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function. ## Messages related to the FSRS schedulers health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.
@ -525,7 +527,7 @@ deck-config-health-check = Check health when optimizing
deck-config-fsrs-bad-fit-warning = Health Check: deck-config-fsrs-bad-fit-warning = Health Check:
Your memory is difficult for FSRS to predict. Recommendations: Your memory is difficult for FSRS to predict. Recommendations:
- Suspend or reformulate any cards you constantly forget. - Suspend or reformulate leeches.
- Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade. - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
- Understand before you memorize. - Understand before you memorize.
@ -536,17 +538,6 @@ deck-config-fsrs-good-fit = Health Check:
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future. ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
This tool will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you're
willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval = deck-config-a-100-day-interval =
{ $days -> { $days ->
[one] A 100 day interval will become { $days } day. [one] A 100 day interval will become { $days } day.


@ -48,7 +48,6 @@ importing-merge-notetypes-help =
Warning: This will require a one-way sync, and may mark existing notes as modified. Warning: This will require a one-way sync, and may mark existing notes as modified.
importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db) importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only. importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
importing-new-deck-will-be-created = A new deck will be created: { $name }
importing-notes-added-from-file = Notes added from file: { $val } importing-notes-added-from-file = Notes added from file: { $val }
importing-notes-found-in-file = Notes found in file: { $val } importing-notes-found-in-file = Notes found in file: { $val }
importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val } importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }


@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile. preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14) preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text preferences-default-search-text = Default search text
preferences-default-search-text-example = e.g. "deck:current" preferences-default-search-text-example = eg. 'deck:current '
preferences-theme = Theme preferences-theme = Theme
preferences-theme-follow-system = Follow System preferences-theme-follow-system = Follow System
preferences-theme-light = Light preferences-theme-light = Light


@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation # This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of # diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date. # cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} cards memorized statistics-memorized = {$memorized} memorized
statistics-today-title = Today statistics-today-title = Today
statistics-today-again-count = Again count: statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount } statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards
## Retention represents your actual retention from past reviews, in ## True Retention represents your actual retention rate from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts ## comparison to the "desired retention" parameter of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards ## future retention. True Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period. ## that were marked as "Hard," "Good," or "Easy" within a specific time period.
## ##
## Most of these strings are used as column / row headings in a table. ## Most of these strings are used as column / row headings in a table.
@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats ## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes. ## window is certain sizes.
statistics-true-retention-title = Retention statistics-true-retention-title = True Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day. statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data. statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range statistics-true-retention-range = Range
statistics-true-retention-pass = Pass statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail statistics-true-retention-fail = Fail


@ -46,20 +46,6 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury? studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet. studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
} studied in
{ $minutes ->
[one] { $minutes } minute.
*[other] { $minutes } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
## OBSOLETE; you do not need to translate this
studying-card-studied-in = studying-card-studied-in =
{ $count -> { $count ->
[one] { $count } card studied in [one] { $count } card studied in
@ -70,3 +56,5 @@ studying-minute =
[one] { $count } minute. [one] { $count } minute.
*[other] { $count } minutes. *[other] { $count } minutes.
} }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed

@ -1 +1 @@
Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6 Subproject commit f42461a6438cbe844150f543128d79a669bc4ef2


@ -19,8 +19,8 @@
"@poppanator/sveltekit-svg": "^5.0.0", "@poppanator/sveltekit-svg": "^5.0.0",
"@sqltools/formatter": "^1.2.2", "@sqltools/formatter": "^1.2.2",
"@sveltejs/adapter-static": "^3.0.0", "@sveltejs/adapter-static": "^3.0.0",
"@sveltejs/kit": "^2.22.2", "@sveltejs/kit": "^2.20.7",
"@sveltejs/vite-plugin-svelte": "5.1", "@sveltejs/vite-plugin-svelte": "4.0.0",
"@types/bootstrap": "^5.0.12", "@types/bootstrap": "^5.0.12",
"@types/codemirror": "^5.60.0", "@types/codemirror": "^5.60.0",
"@types/d3": "^7.0.0", "@types/d3": "^7.0.0",
@ -30,7 +30,7 @@
"@types/jqueryui": "^1.12.13", "@types/jqueryui": "^1.12.13",
"@types/lodash-es": "^4.17.4", "@types/lodash-es": "^4.17.4",
"@types/marked": "^5.0.0", "@types/marked": "^5.0.0",
"@types/node": "^22", "@types/node": "^20",
"@typescript-eslint/eslint-plugin": "^5.60.1", "@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1", "@typescript-eslint/parser": "^5.60.1",
"caniuse-lite": "^1.0.30001431", "caniuse-lite": "^1.0.30001431",
@ -48,16 +48,16 @@
"prettier": "^3.4.2", "prettier": "^3.4.2",
"prettier-plugin-svelte": "^3.3.2", "prettier-plugin-svelte": "^3.3.2",
"sass": "<1.77", "sass": "<1.77",
"svelte": "^5.34.9", "svelte": "^5.17.3",
"svelte-check": "^4.2.2", "svelte-check": "^3.4.4",
"svelte-preprocess": "^6.0.3", "svelte-preprocess": "^5.0.4",
"svelte-preprocess-esbuild": "^3.0.1", "svelte-preprocess-esbuild": "^3.0.1",
"svgo": "^3.2.0", "svgo": "^3.2.0",
"tslib": "^2.0.3", "tslib": "^2.0.3",
"tsx": "^4.8.1", "tsx": "^3.12.0",
"typescript": "^5.0.4", "typescript": "^5.0.4",
"vite": "6", "vite": "5.4.19",
"vitest": "^3" "vitest": "^2"
}, },
"dependencies": { "dependencies": {
"@bufbuild/protobuf": "^1.2.1", "@bufbuild/protobuf": "^1.2.1",
@ -81,9 +81,7 @@
}, },
"resolutions": { "resolutions": {
"canvas": "npm:empty-npm-package@1.0.0", "canvas": "npm:empty-npm-package@1.0.0",
"cookie": "0.7.0", "cookie": "0.7.0"
"devalue": "^5.3.2",
"vite": "6"
}, },
"browserslist": [ "browserslist": [
"defaults", "defaults",


@ -51,7 +51,6 @@ message Card {
optional FsrsMemoryState memory_state = 20; optional FsrsMemoryState memory_state = 20;
optional float desired_retention = 21; optional float desired_retention = 21;
optional float decay = 22; optional float decay = 22;
optional int64 last_review_time_secs = 23;
string custom_data = 19; string custom_data = 19;
} }


@ -20,7 +20,6 @@ service CollectionService {
rpc LatestProgress(generic.Empty) returns (Progress); rpc LatestProgress(generic.Empty) returns (Progress);
rpc SetWantsAbort(generic.Empty) returns (generic.Empty); rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges); rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse);
} }
// Implicitly includes any of the above methods that are not listed in the // Implicitly includes any of the above methods that are not listed in the
@ -164,7 +163,3 @@ message CreateBackupRequest {
bool force = 2; bool force = 2;
bool wait_for_completion = 3; bool wait_for_completion = 3;
} }
message GetCustomColoursResponse {
repeated string colours = 1;
}


@ -56,7 +56,6 @@ message ConfigKey {
RENDER_LATEX = 25; RENDER_LATEX = 25;
LOAD_BALANCER_ENABLED = 26; LOAD_BALANCER_ENABLED = 26;
FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27; FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
FSRS_LEGACY_EVALUATE = 28;
} }
enum String { enum String {
SET_DUE_BROWSER = 0; SET_DUE_BROWSER = 0;


@ -40,10 +40,12 @@ message DeckConfigId {
message GetRetentionWorkloadRequest { message GetRetentionWorkloadRequest {
repeated float w = 1; repeated float w = 1;
string search = 2; string search = 2;
float before = 3;
float after = 4;
} }
message GetRetentionWorkloadResponse { message GetRetentionWorkloadResponse {
map<uint32, float> costs = 1; float factor = 1;
} }
message GetIgnoredBeforeCountRequest { message GetIgnoredBeforeCountRequest {
@ -217,8 +219,6 @@ message DeckConfigsForUpdate {
bool review_today_active = 5; bool review_today_active = 5;
// Whether new_today applies to today or a past day. // Whether new_today applies to today or a past day.
bool new_today_active = 6; bool new_today_active = 6;
// Deck-specific desired retention override
optional float desired_retention = 7;
} }
string name = 1; string name = 1;
int64 config_id = 2; int64 config_id = 2;
@ -236,7 +236,6 @@ message DeckConfigsForUpdate {
bool new_cards_ignore_review_limit = 7; bool new_cards_ignore_review_limit = 7;
bool fsrs = 8; bool fsrs = 8;
bool fsrs_health_check = 11; bool fsrs_health_check = 11;
bool fsrs_legacy_evaluate = 12;
bool apply_all_parent_limits = 9; bool apply_all_parent_limits = 9;
uint32 days_since_last_fsrs_optimize = 10; uint32 days_since_last_fsrs_optimize = 10;
} }


@ -83,8 +83,6 @@ message Deck {
optional uint32 new_limit = 7; optional uint32 new_limit = 7;
DayLimit review_limit_today = 8; DayLimit review_limit_today = 8;
DayLimit new_limit_today = 9; DayLimit new_limit_today = 9;
// Deck-specific desired retention override
optional float desired_retention = 10;
reserved 12 to 15; reserved 12 to 15;
} }


@ -27,9 +27,6 @@ service FrontendService {
rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty); rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
// Warns python that the deck option web view is ready to receive requests. // Warns python that the deck option web view is ready to receive requests.
rpc deckOptionsReady(generic.Empty) returns (generic.Empty); rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
// Save colour picker's custom colour palette
rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
} }
service BackendFrontendService {} service BackendFrontendService {}


@ -176,12 +176,9 @@ message CsvMetadata {
// to determine the number of columns. // to determine the number of columns.
repeated string column_labels = 5; repeated string column_labels = 5;
oneof deck { oneof deck {
// id of an existing deck
int64 deck_id = 6; int64 deck_id = 6;
// One-based. 0 means n/a. // One-based. 0 means n/a.
uint32 deck_column = 7; uint32 deck_column = 7;
// name of new deck to be created
string deck_name = 17;
} }
oneof notetype { oneof notetype {
// One notetype for all rows with given column mapping. // One notetype for all rows with given column mapping.


@ -59,7 +59,7 @@ message AddNoteRequest {
} }
message AddNoteResponse { message AddNoteResponse {
collection.OpChangesWithCount changes = 1; collection.OpChanges changes = 1;
int64 note_id = 2; int64 note_id = 2;
} }


@ -55,11 +55,7 @@ service SchedulerService {
returns (ComputeOptimalRetentionResponse); returns (ComputeOptimalRetentionResponse);
rpc SimulateFsrsReview(SimulateFsrsReviewRequest) rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
returns (SimulateFsrsReviewResponse); returns (SimulateFsrsReviewResponse);
rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
returns (SimulateFsrsWorkloadResponse);
rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse); rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
returns (EvaluateParamsResponse);
rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse); rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
// The number of days the calculated interval was fuzzed by on the previous // The number of days the calculated interval was fuzzed by on the previous
// review (if any). Utilized by the FSRS add-on. // review (if any). Utilized by the FSRS add-on.
@ -406,9 +402,31 @@ message SimulateFsrsReviewRequest {
repeated float easy_days_percentages = 10; repeated float easy_days_percentages = 10;
deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11; deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
optional uint32 suspend_after_lapse_count = 12; optional uint32 suspend_after_lapse_count = 12;
float historical_retention = 13; // For CMRR
uint32 learning_step_count = 14; message CMRRTarget {
uint32 relearning_step_count = 15; message Memorized {
float loss_aversion = 1;
};
message Stability {};
message FutureMemorized {
int32 days = 1;
};
message AverageFutureMemorized {
int32 days = 1;
};
oneof kind {
Memorized memorized = 1;
Stability stability = 2;
FutureMemorized future_memorized = 3;
AverageFutureMemorized average_future_memorized = 4;
};
};
optional CMRRTarget target = 13;
} }
message SimulateFsrsReviewResponse { message SimulateFsrsReviewResponse {
@ -418,12 +436,6 @@ message SimulateFsrsReviewResponse {
repeated float daily_time_cost = 4; repeated float daily_time_cost = 4;
} }
message SimulateFsrsWorkloadResponse {
map<uint32, float> cost = 1;
map<uint32, float> memorized = 2;
map<uint32, uint32> review_count = 3;
}
message ComputeOptimalRetentionResponse { message ComputeOptimalRetentionResponse {
float optimal_retention = 1; float optimal_retention = 1;
} }
@ -455,12 +467,6 @@ message EvaluateParamsRequest {
uint32 num_of_relearning_steps = 3; uint32 num_of_relearning_steps = 3;
} }
message EvaluateParamsLegacyRequest {
repeated float params = 1;
string search = 2;
int64 ignore_revlogs_before_ms = 3;
}
message EvaluateParamsResponse { message EvaluateParamsResponse {
float log_loss = 1; float log_loss = 1;
float rmse_bins = 2; float rmse_bins = 2;


@ -74,15 +74,10 @@ message SearchNode {
repeated SearchNode nodes = 1; repeated SearchNode nodes = 1;
Joiner joiner = 2; Joiner joiner = 2;
} }
enum FieldSearchMode {
FIELD_SEARCH_MODE_NORMAL = 0;
FIELD_SEARCH_MODE_REGEX = 1;
FIELD_SEARCH_MODE_NOCOMBINING = 2;
}
message Field { message Field {
string field_name = 1; string field_name = 1;
string text = 2; string text = 2;
FieldSearchMode mode = 3; bool is_re = 3;
} }
oneof filter { oneof filter {


@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
return BackendError(err.message, help_page, context, backtrace) return BackendError(err.message, help_page, context, backtrace)
elif val == kind.SEARCH_ERROR: elif val == kind.SEARCH_ERROR:
return SearchError(err.message, help_page, context, backtrace) return SearchError(markdown(err.message), help_page, context, backtrace)
elif val == kind.UNDO_EMPTY: elif val == kind.UNDO_EMPTY:
return UndoEmpty(err.message, help_page, context, backtrace) return UndoEmpty(err.message, help_page, context, backtrace)


@ -49,7 +49,6 @@ class Card(DeprecatedNamesMixin):
memory_state: FSRSMemoryState | None memory_state: FSRSMemoryState | None
desired_retention: float | None desired_retention: float | None
decay: float | None decay: float | None
last_review_time: int | None
def __init__( def __init__(
self, self,
@ -104,11 +103,6 @@ class Card(DeprecatedNamesMixin):
card.desired_retention if card.HasField("desired_retention") else None card.desired_retention if card.HasField("desired_retention") else None
) )
self.decay = card.decay if card.HasField("decay") else None self.decay = card.decay if card.HasField("decay") else None
self.last_review_time = (
card.last_review_time_secs
if card.HasField("last_review_time_secs")
else None
)
def _to_backend_card(self) -> cards_pb2.Card: def _to_backend_card(self) -> cards_pb2.Card:
# mtime & usn are set by backend # mtime & usn are set by backend
@ -133,7 +127,6 @@ class Card(DeprecatedNamesMixin):
memory_state=self.memory_state, memory_state=self.memory_state,
desired_retention=self.desired_retention, desired_retention=self.desired_retention,
decay=self.decay, decay=self.decay,
last_review_time_secs=self.last_review_time,
) )
@deprecated(info="please use col.update_card()") @deprecated(info="please use col.update_card()")


@ -528,7 +528,7 @@ class Collection(DeprecatedNamesMixin):
def new_note(self, notetype: NotetypeDict) -> Note: def new_note(self, notetype: NotetypeDict) -> Note:
return Note(self, notetype) return Note(self, notetype)
def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount: def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
hooks.note_will_be_added(self, note, deck_id) hooks.note_will_be_added(self, note, deck_id)
out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id) out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
note.id = NoteId(out.note_id) note.id = NoteId(out.note_id)


@ -175,8 +175,8 @@ class MnemoFact:
def fact_view(self) -> type[MnemoFactView]: def fact_view(self) -> type[MnemoFactView]:
try: try:
fact_view = self.cards[0].fact_view_id fact_view = self.cards[0].fact_view_id
except IndexError: except IndexError as err:
return FrontOnly raise Exception(f"Fact {id} has no cards") from err
if fact_view.startswith("1.") or fact_view.startswith("1::"): if fact_view.startswith("1.") or fact_view.startswith("1::"):
return FrontOnly return FrontOnly
@ -187,7 +187,7 @@ class MnemoFact:
elif fact_view.startswith("5.1"): elif fact_view.startswith("5.1"):
return Cloze return Cloze
raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}") raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]: def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys] return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]


@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
TR = anki._fluent.LegacyTranslationEnum TR = anki._fluent.LegacyTranslationEnum
FormatTimeSpan = _pb.FormatTimespanRequest FormatTimeSpan = _pb.FormatTimespanRequest
# When adding new languages here, check lang_to_disk_lang() below
langs = sorted( langs = sorted(
[ [
("Afrikaans", "af_ZA"), ("Afrikaans", "af_ZA"),
@ -38,7 +38,6 @@ langs = sorted(
("Italiano", "it_IT"), ("Italiano", "it_IT"),
("lo jbobau", "jbo_EN"), ("lo jbobau", "jbo_EN"),
("Lenga d'òc", "oc_FR"), ("Lenga d'òc", "oc_FR"),
("Қазақша", "kk_KZ"),
("Magyar", "hu_HU"), ("Magyar", "hu_HU"),
("Nederlands", "nl_NL"), ("Nederlands", "nl_NL"),
("Norsk", "nb_NO"), ("Norsk", "nb_NO"),
@ -65,7 +64,6 @@ langs = sorted(
("Українська мова", "uk_UA"), ("Українська мова", "uk_UA"),
("Հայերեն", "hy_AM"), ("Հայերեն", "hy_AM"),
("עִבְרִית", "he_IL"), ("עִבְרִית", "he_IL"),
("ייִדיש", "yi"),
("العربية", "ar_SA"), ("العربية", "ar_SA"),
("فارسی", "fa_IR"), ("فارسی", "fa_IR"),
("ภาษาไทย", "th_TH"), ("ภาษาไทย", "th_TH"),
@ -75,7 +73,6 @@ langs = sorted(
("ଓଡ଼ିଆ", "or_OR"), ("ଓଡ଼ିଆ", "or_OR"),
("Filipino", "tl"), ("Filipino", "tl"),
("ئۇيغۇر", "ug"), ("ئۇيغۇر", "ug"),
("Oʻzbekcha", "uz_UZ"),
] ]
) )
@ -106,7 +103,6 @@ compatMap = {
"it": "it_IT", "it": "it_IT",
"ja": "ja_JP", "ja": "ja_JP",
"jbo": "jbo_EN", "jbo": "jbo_EN",
"kk": "kk_KZ",
"ko": "ko_KR", "ko": "ko_KR",
"la": "la_LA", "la": "la_LA",
"mn": "mn_MN", "mn": "mn_MN",
@ -127,9 +123,7 @@ compatMap = {
"th": "th_TH", "th": "th_TH",
"tr": "tr_TR", "tr": "tr_TR",
"uk": "uk_UA", "uk": "uk_UA",
"uz": "uz_UZ",
"vi": "vi_VN", "vi": "vi_VN",
"yi": "yi",
} }
@ -237,7 +231,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
def is_rtl(lang: str) -> bool: def is_rtl(lang: str) -> bool:
return lang in ("he", "ar", "fa", "ug", "yi") return lang in ("he", "ar", "fa", "ug")
# strip off unicode isolation markers from a translated string # strip off unicode isolation markers from a translated string

View file

@ -7,7 +7,7 @@ dependencies = [
"decorator", "decorator",
"markdown", "markdown",
"orjson", "orjson",
"protobuf>=6.0,<8.0", "protobuf>=4.21",
"requests[socks]", "requests[socks]",
# remove after we update to min python 3.11+ # remove after we update to min python 3.11+
"typing_extensions", "typing_extensions",

View file

@ -32,7 +32,6 @@ def test_find_cards():
note = col.newNote() note = col.newNote()
note["Front"] = "cat" note["Front"] = "cat"
note["Back"] = "sheep" note["Back"] = "sheep"
note.tags.append("conjunção größte")
col.addNote(note) col.addNote(note)
catCard = note.cards()[0] catCard = note.cards()[0]
m = col.models.current() m = col.models.current()
@ -69,8 +68,6 @@ def test_find_cards():
col.tags.bulk_remove(col.db.list("select id from notes"), "foo") col.tags.bulk_remove(col.db.list("select id from notes"), "foo")
assert len(col.find_cards("tag:foo")) == 0 assert len(col.find_cards("tag:foo")) == 0
assert len(col.find_cards("tag:bar")) == 5 assert len(col.find_cards("tag:bar")) == 5
assert len(col.find_cards("tag:conjuncao tag:groste")) == 0
assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1
# text searches # text searches
assert len(col.find_cards("cat")) == 2 assert len(col.find_cards("cat")) == 2
assert len(col.find_cards("cat -dog")) == 1 assert len(col.find_cards("cat -dog")) == 1

View file

@ -66,14 +66,13 @@ def show(mw: aqt.AnkiQt) -> QDialog:
# WebView contents # WebView contents
###################################################################### ######################################################################
abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>" abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>"
lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®") abouttext += f"<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}"
abouttext += f"<p>{lede}"
abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}" abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>" abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
abouttext += ("Python %s Qt %s Chromium %s<br>") % ( abouttext += ("Python %s Qt %s PyQt %s<br>") % (
platform.python_version(), platform.python_version(),
qVersion(), qVersion(),
(qWebEngineChromiumVersion() or "").split(".")[0], PYQT_VERSION_STR,
) )
abouttext += ( abouttext += (
without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite)) without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite))
@ -224,9 +223,6 @@ def show(mw: aqt.AnkiQt) -> QDialog:
"Mukunda Madhav Dey", "Mukunda Madhav Dey",
"Adnane Taghi", "Adnane Taghi",
"Anon_0000", "Anon_0000",
"Bilolbek Normuminov",
"Sagiv Marzini",
"Zhanibek Rassululy",
) )
) )

View file

@ -8,7 +8,7 @@ from collections.abc import Callable
import aqt.editor import aqt.editor
import aqt.forms import aqt.forms
from anki._legacy import deprecated from anki._legacy import deprecated
from anki.collection import OpChanges, OpChangesWithCount, SearchNode from anki.collection import OpChanges, SearchNode
from anki.decks import DeckId from anki.decks import DeckId
from anki.models import NotetypeId from anki.models import NotetypeId
from anki.notes import Note, NoteFieldsCheckResult, NoteId from anki.notes import Note, NoteFieldsCheckResult, NoteId
@ -294,13 +294,13 @@ class AddCards(QMainWindow):
target_deck_id = self.deck_chooser.selected_deck_id target_deck_id = self.deck_chooser.selected_deck_id
def on_success(changes: OpChangesWithCount) -> None: def on_success(changes: OpChanges) -> None:
# only used for detecting changed sticky fields on close # only used for detecting changed sticky fields on close
self._last_added_note = note self._last_added_note = note
self.addHistory(note) self.addHistory(note)
tooltip(tr.importing_cards_added(count=changes.count), period=500) tooltip(tr.adding_added(), period=500)
av_player.stop_and_clear_queue() av_player.stop_and_clear_queue()
self._load_new_note(sticky_fields_from=note) self._load_new_note(sticky_fields_from=note)
gui_hooks.add_cards_did_add_note(note) gui_hooks.add_cards_did_add_note(note)

View file

@ -10,8 +10,6 @@ import re
from collections.abc import Callable, Sequence from collections.abc import Callable, Sequence
from typing import Any, cast from typing import Any, cast
from markdown import markdown
import aqt import aqt
import aqt.browser import aqt.browser
import aqt.editor import aqt.editor
@ -22,7 +20,7 @@ from anki.cards import Card, CardId
from anki.collection import Collection, Config, OpChanges, SearchNode from anki.collection import Collection, Config, OpChanges, SearchNode
from anki.consts import * from anki.consts import *
from anki.decks import DeckId from anki.decks import DeckId
from anki.errors import NotFoundError, SearchError from anki.errors import NotFoundError
from anki.lang import without_unicode_isolation from anki.lang import without_unicode_isolation
from anki.models import NotetypeId from anki.models import NotetypeId
from anki.notes import NoteId from anki.notes import NoteId
@ -500,8 +498,6 @@ class Browser(QMainWindow):
text = self.current_search() text = self.current_search()
try: try:
normed = self.col.build_search_string(text) normed = self.col.build_search_string(text)
except SearchError as err:
showWarning(markdown(str(err)))
except Exception as err: except Exception as err:
showWarning(str(err)) showWarning(str(err))
else: else:

View file

@ -51,7 +51,6 @@ class CardInfoDialog(QDialog):
def _setup_ui(self, card_id: CardId | None) -> None: def _setup_ui(self, card_id: CardId | None) -> None:
self.mw.garbage_collect_on_dialog_finish(self) self.mw.garbage_collect_on_dialog_finish(self)
self.setMinimumSize(400, 300)
disable_help_button(self) disable_help_button(self)
restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800)) restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800))
add_close_shortcut(self) add_close_shortcut(self)

View file

@ -13,7 +13,7 @@ import aqt.browser
from anki.cards import Card from anki.cards import Card
from anki.collection import Config from anki.collection import Config
from anki.tags import MARKED_TAG from anki.tags import MARKED_TAG
from aqt import AnkiQt, gui_hooks, is_mac from aqt import AnkiQt, gui_hooks
from aqt.qt import ( from aqt.qt import (
QCheckBox, QCheckBox,
QDialog, QDialog,
@ -81,15 +81,10 @@ class Previewer(QDialog):
qconnect(self.finished, self._on_finished) qconnect(self.finished, self._on_finished)
self.silentlyClose = True self.silentlyClose = True
self.vbox = QVBoxLayout() self.vbox = QVBoxLayout()
spacing = 6
self.vbox.setContentsMargins(0, 0, 0, 0) self.vbox.setContentsMargins(0, 0, 0, 0)
self.vbox.setSpacing(spacing)
self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER) self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
self.vbox.addWidget(self._web) self.vbox.addWidget(self._web)
self.bbox = QDialogButtonBox() self.bbox = QDialogButtonBox()
self.bbox.setContentsMargins(
spacing, spacing if is_mac else 0, spacing, spacing
)
self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight) self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)
gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER) gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)

Binary file not shown (image, 727 B).

View file

@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
xmlns="http://www.w3.org/2000/svg">
<g id="Layer-1" transform="translate(0.5,0.5)">
<rect x="0" y="0" width="20" height="20" fill="none"/>
<g transform="translate(14.8974,6.3648)">
<path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
fill="black" fill-rule="nonzero"/>
</g>
<g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
<ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
fill="none" stroke="black" stroke-width="0.25"/>
</g>
<g transform="translate(3.1987,14.4958)">
<path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
</g>
<g transform="matrix(-1,0,0,1,20.573,18.613)">
<rect x="5.387" y="0.601" width="9.799" height="0.185"
fill="none" stroke="black" stroke-width="2"/>
</g>
<g transform="matrix(-1,0,0,1,20.741,13.51)">
<rect x="9.899" y="1.163" width="0.943" height="4.164"
fill="none" stroke="black" stroke-width="2"/>
</g>
</g>
</svg>

(Removed image, 1.3 KiB)

View file

@ -151,7 +151,6 @@ class Editor:
self.add_webview() self.add_webview()
self.setupWeb() self.setupWeb()
self.setupShortcuts() self.setupShortcuts()
self.setupColourPalette()
gui_hooks.editor_did_init(self) gui_hooks.editor_did_init(self)
# Initial setup # Initial setup
@ -350,14 +349,6 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
keys, fn, _ = row keys, fn, _ = row
QShortcut(QKeySequence(keys), self.widget, activated=fn) # type: ignore QShortcut(QKeySequence(keys), self.widget, activated=fn) # type: ignore
def setupColourPalette(self) -> None:
if not (colors := self.mw.col.get_config("customColorPickerPalette")):
return
for i, colour in enumerate(colors[: QColorDialog.customCount()]):
if not QColor.isValidColorName(colour):
continue
QColorDialog.setCustomColor(i, QColor.fromString(colour))
def _addFocusCheck(self, fn: Callable) -> Callable: def _addFocusCheck(self, fn: Callable) -> Callable:
def checkFocus() -> None: def checkFocus() -> None:
if self.currentField is None: if self.currentField is None:
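A usage note on the customColorPickerPalette hunk above (illustrative, not part of the diff): on main the palette is read from a collection config entry, so it can be seeded e.g. from Anki's debug console. The colour values below are arbitrary examples.

# Seed the custom colour swatches that Editor.setupColourPalette() loads on main.
# Any name accepted by QColor.isValidColorName() works (hex strings, SVG colour names).
mw.col.set_config("customColorPickerPalette", ["#ff8800", "#3366ff", "tomato"])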

View file

@ -23,36 +23,25 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr
if TYPE_CHECKING: if TYPE_CHECKING:
from aqt.main import AnkiQt from aqt.main import AnkiQt
# so we can be non-modal/non-blocking, without Python deallocating the message
# box ahead of time
_mbox: QMessageBox | None = None
def show_exception(*, parent: QWidget, exception: Exception) -> None: def show_exception(*, parent: QWidget, exception: Exception) -> None:
"Present a caught exception to the user using a pop-up." "Present a caught exception to the user using a pop-up."
if isinstance(exception, Interrupted): if isinstance(exception, Interrupted):
# nothing to do # nothing to do
return return
global _mbox
error_lines = []
help_page = HelpPage.TROUBLESHOOTING
if isinstance(exception, BackendError): if isinstance(exception, BackendError):
if exception.context: if exception.context:
error_lines.append(exception.context) print(exception.context)
if exception.backtrace: if exception.backtrace:
error_lines.append(exception.backtrace) print(exception.backtrace)
if exception.help_page is not None: showWarning(str(exception), parent=parent, help=exception.help_page)
help_page = exception.help_page
else: else:
# if the error is not originating from the backend, dump # if the error is not originating from the backend, dump
# a traceback to the console to aid in debugging # a traceback to the console to aid in debugging
error_lines = traceback.format_exception( traceback.print_exception(
None, exception, exception.__traceback__ None, exception, exception.__traceback__, file=sys.stdout
) )
error_text = "\n".join(error_lines) showWarning(str(exception), parent=parent)
print(error_lines)
_mbox = _init_message_box(str(exception), error_text, help_page)
_mbox.show()
def is_chromium_cert_error(error: str) -> bool: def is_chromium_cert_error(error: str) -> bool:
@ -169,39 +158,9 @@ if not os.environ.get("DEBUG"):
sys.excepthook = excepthook sys.excepthook = excepthook
# so we can be non-modal/non-blocking, without Python deallocating the message
def _init_message_box( # box ahead of time
user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING _mbox: QMessageBox | None = None
):
global _mbox
_mbox = QMessageBox()
_mbox.setWindowTitle("Anki")
_mbox.setText(user_text)
_mbox.setIcon(QMessageBox.Icon.Warning)
_mbox.setTextFormat(Qt.TextFormat.PlainText)
def show_help():
openHelp(help_page)
def copy_debug_info():
QApplication.clipboard().setText(debug_text)
tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
help = _mbox.addButton(QMessageBox.StandardButton.Help)
if debug_text:
debug_info = _mbox.addButton(
tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
)
debug_info.disconnect()
debug_info.clicked.connect(copy_debug_info)
cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
cancel.setText(tr.actions_close())
help.disconnect()
help.clicked.connect(show_help)
return _mbox
class ErrorHandler(QObject): class ErrorHandler(QObject):
@ -293,7 +252,33 @@ class ErrorHandler(QObject):
user_text += "\n\n" + self._addonText(error) user_text += "\n\n" + self._addonText(error)
debug_text += addon_debug_info() debug_text += addon_debug_info()
_mbox = _init_message_box(user_text, debug_text) def show_troubleshooting():
openHelp(HelpPage.TROUBLESHOOTING)
def copy_debug_info():
QApplication.clipboard().setText(debug_text)
tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
global _mbox
_mbox = QMessageBox()
_mbox.setWindowTitle("Anki")
_mbox.setText(user_text)
_mbox.setIcon(QMessageBox.Icon.Warning)
_mbox.setTextFormat(Qt.TextFormat.PlainText)
troubleshooting = _mbox.addButton(
tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole
)
debug_info = _mbox.addButton(
tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
)
cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
cancel.setText(tr.actions_close())
troubleshooting.disconnect()
troubleshooting.clicked.connect(show_troubleshooting)
debug_info.disconnect()
debug_info.clicked.connect(copy_debug_info)
if self.fatal_error_encountered: if self.fatal_error_encountered:
_mbox.exec() _mbox.exec()
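For readers unfamiliar with the pattern both sides of this file rely on: the module-level _mbox reference exists purely so the non-modal QMessageBox is not garbage-collected while it is still on screen. A minimal standalone sketch, assuming PyQt6 and an already-running QApplication (the helper name is illustrative, not Anki's):

from __future__ import annotations

from PyQt6.QtWidgets import QMessageBox

_box: QMessageBox | None = None  # keeps the box alive after the function returns

def show_nonmodal_warning(text: str) -> None:
    global _box
    _box = QMessageBox()
    _box.setWindowTitle("Anki")
    _box.setIcon(QMessageBox.Icon.Warning)
    _box.setText(text)
    _box.show()  # non-blocking; exec() would block, like the fatal-error path above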

View file

@ -1292,10 +1292,9 @@
<tabstop>daily_backups</tabstop> <tabstop>daily_backups</tabstop>
<tabstop>weekly_backups</tabstop> <tabstop>weekly_backups</tabstop>
<tabstop>monthly_backups</tabstop> <tabstop>monthly_backups</tabstop>
<tabstop>tabWidget</tabstop>
<tabstop>syncAnkiHubLogout</tabstop> <tabstop>syncAnkiHubLogout</tabstop>
<tabstop>syncAnkiHubLogin</tabstop> <tabstop>syncAnkiHubLogin</tabstop>
<tabstop>buttonBox</tabstop>
<tabstop>tabWidget</tabstop>
</tabstops> </tabstops>
<resources/> <resources/>
<connections> <connections>

View file

@ -1309,7 +1309,7 @@ title="{}" {}>{}</button>""".format(
if not askUser(tr.qt_misc_open_anki_launcher()): if not askUser(tr.qt_misc_open_anki_launcher()):
return return
from aqt.package import update_and_restart from aqt.update import update_and_restart
update_and_restart() update_and_restart()
@ -1394,7 +1394,7 @@ title="{}" {}>{}</button>""".format(
########################################################################## ##########################################################################
def setupMenus(self) -> None: def setupMenus(self) -> None:
from aqt.package import launcher_executable from aqt.update import have_launcher
m = self.form m = self.form
@ -1426,7 +1426,7 @@ title="{}" {}>{}</button>""".format(
qconnect(m.actionEmptyCards.triggered, self.onEmptyCards) qconnect(m.actionEmptyCards.triggered, self.onEmptyCards)
qconnect(m.actionNoteTypes.triggered, self.onNoteTypes) qconnect(m.actionNoteTypes.triggered, self.onNoteTypes)
qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade) qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade)
if not launcher_executable(): if not have_launcher():
m.action_upgrade_downgrade.setVisible(False) m.action_upgrade_downgrade.setVisible(False)
qconnect(m.actionPreferences.triggered, self.onPrefs) qconnect(m.actionPreferences.triggered, self.onPrefs)

View file

@ -170,42 +170,13 @@ def favicon() -> Response:
def _mime_for_path(path: str) -> str: def _mime_for_path(path: str) -> str:
"Mime type for provided path/filename." "Mime type for provided path/filename."
if path.endswith(".css"):
_, ext = os.path.splitext(path) # some users may have invalid mime type in the Windows registry
ext = ext.lower() return "text/css"
elif path.endswith(".js") or path.endswith(".mjs"):
# Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely return "application/javascript"
# break Anki's UI. So we hard-code the most common extensions.
mime_types = {
".css": "text/css",
".js": "application/javascript",
".mjs": "application/javascript",
".html": "text/html",
".htm": "text/html",
".svg": "image/svg+xml",
".png": "image/png",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
".gif": "image/gif",
".webp": "image/webp",
".ico": "image/x-icon",
".json": "application/json",
".woff": "font/woff",
".woff2": "font/woff2",
".ttf": "font/ttf",
".otf": "font/otf",
".mp3": "audio/mpeg",
".mp4": "video/mp4",
".webm": "video/webm",
".ogg": "audio/ogg",
".pdf": "application/pdf",
".txt": "text/plain",
}
if mime := mime_types.get(ext):
return mime
else: else:
# fallback to mimetypes, which may consult the registry # autodetect
mime, _encoding = mimetypes.guess_type(path) mime, _encoding = mimetypes.guess_type(path)
return mime or "application/octet-stream" return mime or "application/octet-stream"
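A condensed, standalone sketch of the lookup-with-fallback idea in the hunk above (the table is truncated here and the helper name is illustrative):

from __future__ import annotations

import mimetypes
import os

_KNOWN_MIME = {".css": "text/css", ".js": "application/javascript", ".svg": "image/svg+xml"}

def guess_mime(path: str) -> str:
    _, ext = os.path.splitext(path)
    if mime := _KNOWN_MIME.get(ext.lower()):
        return mime  # hard-coded value, immune to a damaged Windows registry
    mime, _encoding = mimetypes.guess_type(path)  # may consult the registry
    return mime or "application/octet-stream"

print(guess_mime("card.CSS"))   # text/css
print(guess_mime("clip.flac"))  # whatever mimetypes reports, or the octet-stream fallback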
@ -512,7 +483,7 @@ def update_deck_configs() -> bytes:
update.abort = True update.abort = True
def on_success(changes: OpChanges) -> None: def on_success(changes: OpChanges) -> None:
if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog): if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
window.reject() window.reject()
def handle_on_main() -> None: def handle_on_main() -> None:
@ -540,7 +511,7 @@ def set_scheduling_states() -> bytes:
def import_done() -> bytes: def import_done() -> bytes:
def update_window_modality() -> None: def update_window_modality() -> None:
if window := aqt.mw.app.activeModalWidget(): if window := aqt.mw.app.activeWindow():
from aqt.import_export.import_dialog import ImportDialog from aqt.import_export.import_dialog import ImportDialog
if isinstance(window, ImportDialog): if isinstance(window, ImportDialog):
@ -558,7 +529,7 @@ def import_request(endpoint: str) -> bytes:
response.ParseFromString(output) response.ParseFromString(output)
def handle_on_main() -> None: def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget() window = aqt.mw.app.activeWindow()
on_op_finished(aqt.mw, response, window) on_op_finished(aqt.mw, response, window)
aqt.mw.taskman.run_on_main(handle_on_main) aqt.mw.taskman.run_on_main(handle_on_main)
@ -598,7 +569,7 @@ def change_notetype() -> bytes:
data = request.data data = request.data
def handle_on_main() -> None: def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget() window = aqt.mw.app.activeWindow()
if isinstance(window, ChangeNotetypeDialog): if isinstance(window, ChangeNotetypeDialog):
window.save(data) window.save(data)
@ -608,7 +579,7 @@ def change_notetype() -> bytes:
def deck_options_require_close() -> bytes: def deck_options_require_close() -> bytes:
def handle_on_main() -> None: def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget() window = aqt.mw.app.activeWindow()
if isinstance(window, DeckOptionsDialog): if isinstance(window, DeckOptionsDialog):
window.require_close() window.require_close()
@ -620,7 +591,7 @@ def deck_options_require_close() -> bytes:
def deck_options_ready() -> bytes: def deck_options_ready() -> bytes:
def handle_on_main() -> None: def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget() window = aqt.mw.app.activeWindow()
if isinstance(window, DeckOptionsDialog): if isinstance(window, DeckOptionsDialog):
window.set_ready() window.set_ready()
@ -628,15 +599,6 @@ def deck_options_ready() -> bytes:
return b"" return b""
def save_custom_colours() -> bytes:
colors = [
QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb)
for i in range(QColorDialog.customCount())
]
aqt.mw.col.set_config("customColorPickerPalette", colors)
return b""
post_handler_list = [ post_handler_list = [
congrats_info, congrats_info,
get_deck_configs_for_update, get_deck_configs_for_update,
@ -652,14 +614,12 @@ post_handler_list = [
search_in_browser, search_in_browser,
deck_options_require_close, deck_options_require_close,
deck_options_ready, deck_options_ready,
save_custom_colours,
] ]
exposed_backend_list = [ exposed_backend_list = [
# CollectionService # CollectionService
"latest_progress", "latest_progress",
"get_custom_colours",
# DeckService # DeckService
"get_deck_names", "get_deck_names",
# I18nService # I18nService
@ -691,10 +651,9 @@ exposed_backend_list = [
"compute_fsrs_params", "compute_fsrs_params",
"compute_optimal_retention", "compute_optimal_retention",
"set_wants_abort", "set_wants_abort",
"evaluate_params_legacy", "evaluate_params",
"get_optimal_retention_parameters", "get_optimal_retention_parameters",
"simulate_fsrs_review", "simulate_fsrs_review",
"simulate_fsrs_workload",
# DeckConfigService # DeckConfigService
"get_ignored_before_count", "get_ignored_before_count",
"get_retention_workload", "get_retention_workload",

View file

@ -18,7 +18,7 @@ def add_note(
parent: QWidget, parent: QWidget,
note: Note, note: Note,
target_deck_id: DeckId, target_deck_id: DeckId,
) -> CollectionOp[OpChangesWithCount]: ) -> CollectionOp[OpChanges]:
return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id)) return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id))

View file

@ -5,13 +5,10 @@
from __future__ import annotations from __future__ import annotations
import contextlib
import os
import subprocess import subprocess
import sys
from pathlib import Path from pathlib import Path
from anki.utils import is_mac, is_win from anki.utils import is_mac
# ruff: noqa: F401 # ruff: noqa: F401
@ -68,106 +65,3 @@ def first_run_setup() -> None:
# Wait for both commands to complete # Wait for both commands to complete
for proc in processes: for proc in processes:
proc.wait() proc.wait()
def uv_binary() -> str | None:
"""Return the path to the uv binary."""
return os.environ.get("ANKI_LAUNCHER_UV")
def launcher_root() -> str | None:
"""Return the path to the launcher root directory (AnkiProgramFiles)."""
return os.environ.get("UV_PROJECT")
def venv_binary(cmd: str) -> str | None:
"""Return the path to a binary in the launcher's venv."""
root = launcher_root()
if not root:
return None
root_path = Path(root)
if is_win:
binary_path = root_path / ".venv" / "Scripts" / cmd
else:
binary_path = root_path / ".venv" / "bin" / cmd
return str(binary_path)
def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
"""Add Python requirements to the launcher venv using uv add.
Returns (success, output)"""
binary = uv_binary()
if not binary:
return (False, "Not in packaged build.")
uv_cmd = [binary, "add"] + reqs
result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)
if result.returncode == 0:
root = launcher_root()
if root:
sync_marker = Path(root) / ".sync_complete"
sync_marker.touch()
return (True, result.stdout)
else:
return (False, result.stderr)
def launcher_executable() -> str | None:
"""Return the path to the Anki launcher executable."""
return os.getenv("ANKI_LAUNCHER")
def trigger_launcher_run() -> None:
"""Create a trigger file to request launcher UI on next run."""
try:
root = launcher_root()
if not root:
return
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
except Exception as e:
print(e)
def update_and_restart() -> None:
"""Update and restart Anki using the launcher."""
from aqt import mw
launcher = launcher_executable()
assert launcher
trigger_launcher_run()
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
# fixes a bug where launcher fails to appear if opening it
# straight after updating
if "GNOME_TERMINAL_SCREEN" in env:
del env["GNOME_TERMINAL_SCREEN"]
creationflags = 0
if sys.platform == "win32":
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
# On Windows 10, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=io,
stdout=io,
stderr=io,
env=env,
creationflags=creationflags,
)
mw.app.quit()
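As an illustration of how the helpers in this hunk are meant to be consumed from add-on code (the dependency pin is an arbitrary example, and the calls only do useful work when Anki was started via the launcher):

from aqt.package import add_python_requirements, venv_binary

ok, output = add_python_requirements(["beautifulsoup4==4.12.3"])  # example pin
if not ok:
    print("could not add requirement:", output)

pip_path = venv_binary("pip")  # None outside the launcher environment
print("venv pip:", pip_path)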

View file

@ -82,14 +82,11 @@ class Preferences(QDialog):
) )
group = self.form.preferences_answer_keys group = self.form.preferences_answer_keys
group.setLayout(layout := QFormLayout()) group.setLayout(layout := QFormLayout())
tab_widget: QWidget = self.form.url_schemes
for ease, label in ease_labels: for ease, label in ease_labels:
layout.addRow( layout.addRow(
label, label,
line_edit := QLineEdit(self.mw.pm.get_answer_key(ease) or ""), line_edit := QLineEdit(self.mw.pm.get_answer_key(ease) or ""),
) )
QWidget.setTabOrder(tab_widget, line_edit)
tab_widget = line_edit
qconnect( qconnect(
line_edit.textChanged, line_edit.textChanged,
functools.partial(self.mw.pm.set_answer_key, ease), functools.partial(self.mw.pm.set_answer_key, ease),

View file

@ -17,7 +17,6 @@ import aqt.browser
import aqt.operations import aqt.operations
from anki.cards import Card, CardId from anki.cards import Card, CardId
from anki.collection import Config, OpChanges, OpChangesWithCount from anki.collection import Config, OpChanges, OpChangesWithCount
from anki.lang import with_collapsed_whitespace
from anki.scheduler.base import ScheduleCardsAsNew from anki.scheduler.base import ScheduleCardsAsNew
from anki.scheduler.v3 import ( from anki.scheduler.v3 import (
CardAnswer, CardAnswer,
@ -967,15 +966,11 @@ timerStopped = false;
elapsed = self.mw.col.timeboxReached() elapsed = self.mw.col.timeboxReached()
if elapsed: if elapsed:
assert not isinstance(elapsed, bool) assert not isinstance(elapsed, bool)
cards_val = elapsed[1] part1 = tr.studying_card_studied_in(count=elapsed[1])
minutes_val = int(round(elapsed[0] / 60)) mins = int(round(elapsed[0] / 60))
message = with_collapsed_whitespace( part2 = tr.studying_minute(count=mins)
tr.studying_card_studied_in_minute(
cards=cards_val, minutes=str(minutes_val)
)
)
fin = tr.studying_finish() fin = tr.studying_finish()
diag = askUserDialog(message, [tr.studying_continue(), fin]) diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
diag.setIcon(QMessageBox.Icon.Information) diag.setIcon(QMessageBox.Icon.Information)
if diag.run() == fin: if diag.run() == fin:
self.mw.moveToState("deckBrowser") self.mw.moveToState("deckBrowser")

View file

@ -32,7 +32,6 @@ from aqt._macos_helper import macos_helper
from aqt.mpv import MPV, MPVBase, MPVCommandError from aqt.mpv import MPV, MPVBase, MPVCommandError
from aqt.qt import * from aqt.qt import *
from aqt.taskman import TaskManager from aqt.taskman import TaskManager
from aqt.theme import theme_manager
from aqt.utils import ( from aqt.utils import (
disable_help_button, disable_help_button,
restoreGeom, restoreGeom,
@ -631,44 +630,18 @@ class QtAudioInputRecorder(Recorder):
self.mw = mw self.mw = mw
self._parent = parent self._parent = parent
from PyQt6.QtMultimedia import QAudioSource, QMediaDevices # type: ignore from PyQt6.QtMultimedia import QAudioFormat, QAudioSource # type: ignore
# Get the default audio input device format = QAudioFormat()
device = QMediaDevices.defaultAudioInput() format.setChannelCount(1)
format.setSampleRate(44100)
format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
# Try to use Int16 format first (avoids conversion) source = QAudioSource(format, parent)
preferred_format = device.preferredFormat()
int16_format = preferred_format
int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
if device.isFormatSupported(int16_format):
# Use Int16 if supported
format = int16_format
else:
# Fall back to device's preferred format
format = preferred_format
# Create the audio source with the chosen format
source = QAudioSource(device, format, parent)
# Store the actual format being used
self._format = source.format() self._format = source.format()
self._audio_input = source self._audio_input = source
def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
"""Convert float32 audio samples to int16 format for WAV output."""
import struct
float_count = len(float_buffer) // 4 # 4 bytes per float32
floats = struct.unpack(f"{float_count}f", float_buffer)
# Convert to int16 range, clipping and scaling in one step
int16_samples = [
max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
]
return struct.pack(f"{len(int16_samples)}h", *int16_samples)
def start(self, on_done: Callable[[], None]) -> None: def start(self, on_done: Callable[[], None]) -> None:
self._iodevice = self._audio_input.start() self._iodevice = self._audio_input.start()
self._buffer = bytearray() self._buffer = bytearray()
@ -691,32 +664,18 @@ class QtAudioInputRecorder(Recorder):
return return
def write_file() -> None: def write_file() -> None:
from PyQt6.QtMultimedia import QAudioFormat
# swallow the first 300ms to allow audio device to quiesce # swallow the first 300ms to allow audio device to quiesce
bytes_per_frame = self._format.bytesPerFrame() wait = int(44100 * self.STARTUP_DELAY)
frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY) if len(self._buffer) <= wait:
bytes_to_skip = frames_to_skip * bytes_per_frame
if len(self._buffer) <= bytes_to_skip:
return return
self._buffer = self._buffer[bytes_to_skip:] self._buffer = self._buffer[wait:]
# Check if we need to convert float samples to int16 # write out the wave file
if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
audio_data = self._convert_float_to_int16(self._buffer)
sample_width = 2 # int16 is 2 bytes
else:
# For integer formats, use the data as-is
audio_data = bytes(self._buffer)
sample_width = self._format.bytesPerSample()
# write out the wave file with the correct format parameters
wf = wave.open(self.output_path, "wb") wf = wave.open(self.output_path, "wb")
wf.setnchannels(self._format.channelCount()) wf.setnchannels(self._format.channelCount())
wf.setsampwidth(sample_width) wf.setsampwidth(2)
wf.setframerate(self._format.sampleRate()) wf.setframerate(self._format.sampleRate())
wf.writeframes(audio_data) wf.writeframes(self._buffer)
wf.close() wf.close()
def and_then(fut: Future) -> None: def and_then(fut: Future) -> None:
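A standalone sketch of the two calculations in the recorder hunks above: the byte count corresponding to the 300 ms startup delay, and the clip-and-scale step from float32 samples to int16. The concrete values are illustrative.

import struct

STARTUP_DELAY = 0.3     # seconds ("first 300ms" per the comment above)
sample_rate = 44100     # frames per second reported by QAudioFormat
bytes_per_frame = 4     # e.g. mono float32
bytes_to_skip = int(sample_rate * STARTUP_DELAY) * bytes_per_frame  # ~52.9 kB at 44.1 kHz mono float32

def float32_to_int16(buf: bytes) -> bytes:
    floats = struct.unpack(f"{len(buf) // 4}f", buf)
    ints = [max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats]
    return struct.pack(f"{len(ints)}h", *ints)

print(float32_to_int16(struct.pack("3f", -1.5, 0.0, 0.5)))  # clipped, zero, ~half scale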
@ -784,8 +743,7 @@ class RecordDialog(QDialog):
def _setup_dialog(self) -> None: def _setup_dialog(self) -> None:
self.setWindowTitle("Anki") self.setWindowTitle("Anki")
icon = QLabel() icon = QLabel()
qicon = theme_manager.icon_from_resources("icons:media-record.svg") icon.setPixmap(QPixmap("icons:media-record.png"))
icon.setPixmap(qicon.pixmap(60, 60))
self.label = QLabel("...") self.label = QLabel("...")
hbox = QHBoxLayout() hbox = QHBoxLayout()
hbox.addWidget(icon) hbox.addWidget(icon)

View file

@ -177,13 +177,9 @@ class CustomStyles:
QPushButton:default {{ QPushButton:default {{
border: 1px solid {tm.var(colors.BORDER_FOCUS)}; border: 1px solid {tm.var(colors.BORDER_FOCUS)};
}} }}
QPushButton {{ QPushButton:focus {{
margin: 1px;
}}
QPushButton:focus, QPushButton:default:hover {{
border: 2px solid {tm.var(colors.BORDER_FOCUS)}; border: 2px solid {tm.var(colors.BORDER_FOCUS)};
outline: none; outline: none;
margin: 0px;
}} }}
QPushButton:hover, QPushButton:hover,
QTabBar::tab:hover, QTabBar::tab:hover,
@ -199,6 +195,9 @@ class CustomStyles:
) )
}; };
}} }}
QPushButton:default:hover {{
border-width: 2px;
}}
QPushButton:pressed, QPushButton:pressed,
QPushButton:checked, QPushButton:checked,
QSpinBox::up-button:pressed, QSpinBox::up-button:pressed,

View file

@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
elif isinstance(err, Interrupted): elif isinstance(err, Interrupted):
# no message to show # no message to show
return return
show_warning(str(err), parent=mw) show_warning(str(err))
def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None: def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
if out.new_endpoint: if out.new_endpoint:
mw.pm.set_current_sync_url(out.new_endpoint) mw.pm.set_current_sync_url(out.new_endpoint)
if out.server_message: if out.server_message:
showText(out.server_message, parent=mw) showText(out.server_message)
if out.required == out.NO_CHANGES: if out.required == out.NO_CHANGES:
tooltip(parent=mw, msg=tr.sync_collection_complete()) tooltip(parent=mw, msg=tr.sync_collection_complete())
# all done; track media progress # all done; track media progress

View file

@ -115,7 +115,7 @@ class ThemeManager:
# Workaround for Qt bug. First attempt was percent-escaping the chars, # Workaround for Qt bug. First attempt was percent-escaping the chars,
# but Qt can't handle that. # but Qt can't handle that.
# https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11 # https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path) path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
return path return path
def icon_from_resources(self, path: str | ColoredIcon) -> QIcon: def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:
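A quick, standalone check of the regex change above (main additionally escapes single quotes so paths survive inside url('...') in Qt stylesheets; the sample path is made up):

import re

path = "C:/données/l'icône.svg"
print(re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path))  # C:/donn\ées/l\'ic\ône.svg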

View file

@ -3,17 +3,16 @@
from __future__ import annotations from __future__ import annotations
import contextlib
import os
import subprocess
from pathlib import Path
import aqt import aqt
from anki.buildinfo import buildhash from anki.buildinfo import buildhash
from anki.collection import CheckForUpdateResponse, Collection from anki.collection import CheckForUpdateResponse, Collection
from anki.utils import dev_mode, int_time, int_version, plat_desc from anki.utils import dev_mode, int_time, int_version, is_mac, is_win, plat_desc
from aqt.operations import QueryOp from aqt.operations import QueryOp
from aqt.package import (
launcher_executable as _launcher_executable,
)
from aqt.package import (
update_and_restart as _update_and_restart,
)
from aqt.qt import * from aqt.qt import *
from aqt.utils import openLink, show_warning, showText, tr from aqt.utils import openLink, show_warning, showText, tr
@ -85,7 +84,67 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None:
# ignore this update # ignore this update
mw.pm.meta["suppressUpdate"] = ver mw.pm.meta["suppressUpdate"] = ver
elif ret == QMessageBox.StandardButton.Yes: elif ret == QMessageBox.StandardButton.Yes:
if _launcher_executable(): if have_launcher():
_update_and_restart() update_and_restart()
else: else:
openLink(aqt.appWebsiteDownloadSection) openLink(aqt.appWebsiteDownloadSection)
def _anki_launcher_path() -> str | None:
return os.getenv("ANKI_LAUNCHER")
def have_launcher() -> bool:
return _anki_launcher_path() is not None
def update_and_restart() -> None:
from aqt import mw
launcher = _anki_launcher_path()
assert launcher
_trigger_launcher_run()
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
creationflags = 0
if sys.platform == "win32":
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
env=env,
creationflags=creationflags,
)
mw.app.quit()
def _trigger_launcher_run() -> None:
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
try:
# Get the local data directory equivalent to Rust's dirs::data_local_dir()
if is_win:
from .winpaths import get_local_appdata
data_dir = Path(get_local_appdata())
elif is_mac:
data_dir = Path.home() / "Library" / "Application Support"
else: # Linux
data_dir = Path(
os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
)
pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
except Exception as e:
print(e)

View file

@ -226,45 +226,29 @@ def ask_user_dialog(
) )
def show_info( def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox:
"Show a small info window with an OK button." "Show a small info window with an OK button."
if "icon" not in kwargs: if "icon" not in kwargs:
kwargs["icon"] = QMessageBox.Icon.Information kwargs["icon"] = QMessageBox.Icon.Information
return MessageBox( return MessageBox(
text, text,
callback=(lambda _: callback()) if callback is not None else None, callback=(lambda _: callback()) if callback is not None else None,
parent=parent,
**kwargs, **kwargs,
) )
def show_warning( def show_warning(
text: str, text: str, callback: Callable | None = None, **kwargs: Any
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox: ) -> MessageBox:
"Show a small warning window with an OK button." "Show a small warning window with an OK button."
return show_info( return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
)
def show_critical( def show_critical(
text: str, text: str, callback: Callable | None = None, **kwargs: Any
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox: ) -> MessageBox:
"Show a small critical error window with an OK button." "Show a small critical error window with an OK button."
return show_info( return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
)
def showWarning( def showWarning(

View file

@ -67,12 +67,16 @@ class CustomBuildHook(BuildHookInterface):
def _should_exclude(self, path: Path) -> bool: def _should_exclude(self, path: Path) -> bool:
"""Check if a file should be excluded from the wheel.""" """Check if a file should be excluded from the wheel."""
path_str = str(path)
# Exclude __pycache__ # Exclude __pycache__
if "/__pycache__/" in str(path): if "/__pycache__/" in path_str:
return True return True
if path.suffix in [".ui", ".scss", ".map", ".ts"]: if path.suffix in [".ui", ".scss", ".map", ".ts"]:
return True return True
if path.name.startswith("tsconfig"): if path.name.startswith("tsconfig"):
return True return True
if "/aqt/data" in path_str:
return True
return False return False

View file

@ -13,10 +13,6 @@ anki_process.workspace = true
anyhow.workspace = true anyhow.workspace = true
camino.workspace = true camino.workspace = true
dirs.workspace = true dirs.workspace = true
serde_json.workspace = true
[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]
libc.workspace = true
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
windows.workspace = true windows.workspace = true

View file

@ -8,103 +8,43 @@ import os
import subprocess import subprocess
import sys import sys
from pathlib import Path from pathlib import Path
from typing import Any
import aqt.sound
from anki.utils import pointVersion from anki.utils import pointVersion
from aqt import mw from aqt import mw
from aqt.qt import QAction from aqt.qt import QAction
from aqt.utils import askUser, is_mac, is_win, showInfo from aqt.utils import askUser, is_mac, is_win, showInfo
def launcher_executable() -> str | None: def _anki_launcher_path() -> str | None:
"""Return the path to the Anki launcher executable."""
return os.getenv("ANKI_LAUNCHER") return os.getenv("ANKI_LAUNCHER")
def uv_binary() -> str | None: def have_launcher() -> bool:
"""Return the path to the uv binary.""" return _anki_launcher_path() is not None
return os.environ.get("ANKI_LAUNCHER_UV")
def launcher_root() -> str | None:
"""Return the path to the launcher root directory (AnkiProgramFiles)."""
return os.environ.get("UV_PROJECT")
def venv_binary(cmd: str) -> str | None:
"""Return the path to a binary in the launcher's venv."""
root = launcher_root()
if not root:
return None
root_path = Path(root)
if is_win:
binary_path = root_path / ".venv" / "Scripts" / cmd
else:
binary_path = root_path / ".venv" / "bin" / cmd
return str(binary_path)
def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
"""Add Python requirements to the launcher venv using uv add.
Returns (success, output)"""
binary = uv_binary()
if not binary:
return (False, "Not in packaged build.")
uv_cmd = [binary, "add"] + reqs
result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)
if result.returncode == 0:
root = launcher_root()
if root:
sync_marker = Path(root) / ".sync_complete"
sync_marker.touch()
return (True, result.stdout)
else:
return (False, result.stderr)
def trigger_launcher_run() -> None:
"""Create a trigger file to request launcher UI on next run."""
try:
root = launcher_root()
if not root:
return
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
except Exception as e:
print(e)
def update_and_restart() -> None: def update_and_restart() -> None:
"""Update and restart Anki using the launcher.""" from aqt import mw
launcher = launcher_executable()
launcher = _anki_launcher_path()
assert launcher assert launcher
trigger_launcher_run() _trigger_launcher_run()
with contextlib.suppress(ResourceWarning): with contextlib.suppress(ResourceWarning):
env = os.environ.copy() env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
creationflags = 0 creationflags = 0
if sys.platform == "win32": if sys.platform == "win32":
creationflags = ( creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
) )
# On Windows, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen( subprocess.Popen(
[launcher], [launcher],
start_new_session=True, start_new_session=True,
stdin=io, stdin=subprocess.DEVNULL,
stdout=io, stdout=subprocess.DEVNULL,
stderr=io, stderr=subprocess.DEVNULL,
env=env, env=env,
creationflags=creationflags, creationflags=creationflags,
) )
@ -112,6 +52,30 @@ def update_and_restart() -> None:
mw.app.quit() mw.app.quit()
def _trigger_launcher_run() -> None:
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
try:
# Get the local data directory equivalent to Rust's dirs::data_local_dir()
if is_win:
from aqt.winpaths import get_local_appdata
data_dir = Path(get_local_appdata())
elif is_mac:
data_dir = Path.home() / "Library" / "Application Support"
else: # Linux
data_dir = Path(
os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
)
pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
except Exception as e:
print(e)
def confirm_then_upgrade(): def confirm_then_upgrade():
if not askUser("Change to a different Anki version?"): if not askUser("Change to a different Anki version?"):
return return
@ -152,18 +116,10 @@ def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]:
return cmd, env return cmd, env
def on_addon_config():
showInfo(
"This add-on is automatically added when installing older Anki versions, so that they work with the launcher. You can remove it if you wish."
)
def setup(): def setup():
mw.addonManager.setConfigAction(__name__, on_addon_config)
if pointVersion() >= 250600: if pointVersion() >= 250600:
return return
if not launcher_executable(): if not have_launcher():
return return
# Add action to tools menu # Add action to tools menu
@ -173,21 +129,7 @@ def setup():
# Monkey-patch audio tools to use anki-audio # Monkey-patch audio tools to use anki-audio
if is_win or is_mac: if is_win or is_mac:
import aqt
import aqt.sound
aqt.sound._packagedCmd = _packagedCmd aqt.sound._packagedCmd = _packagedCmd
# Inject launcher functions into launcher module
import aqt.package
aqt.package.launcher_executable = launcher_executable
aqt.package.update_and_restart = update_and_restart
aqt.package.trigger_launcher_run = trigger_launcher_run
aqt.package.uv_binary = uv_binary
aqt.package.launcher_root = launcher_root
aqt.package.venv_binary = venv_binary
aqt.package.add_python_requirements = add_python_requirements
setup() setup()

View file

@ -13,8 +13,7 @@ HOST_ARCH=$(uname -m)
# Define output paths # Define output paths
OUTPUT_DIR="../../../out/launcher" OUTPUT_DIR="../../../out/launcher"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n') LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher"
LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux"
# Clean existing output directory # Clean existing output directory
rm -rf "$LAUNCHER_DIR" rm -rf "$LAUNCHER_DIR"
@ -62,7 +61,6 @@ done
# Copy additional files from parent directory # Copy additional files from parent directory
cp ../pyproject.toml "$LAUNCHER_DIR/" cp ../pyproject.toml "$LAUNCHER_DIR/"
cp ../../../.python-version "$LAUNCHER_DIR/" cp ../../../.python-version "$LAUNCHER_DIR/"
cp ../versions.py "$LAUNCHER_DIR/"
# Set executable permissions # Set executable permissions
chmod +x \ chmod +x \
@ -77,9 +75,10 @@ chmod +x \
# Set proper permissions and create tarball # Set proper permissions and create tarball
chmod -R a+r "$LAUNCHER_DIR" chmod -R a+r "$LAUNCHER_DIR"
# Create tarball using the same options as the Rust template
ZSTD="zstd -c --long -T0 -18" ZSTD="zstd -c --long -T0 -18"
TRANSFORM="s%^.%anki-launcher-$ANKI_VERSION-linux%S" TRANSFORM="s%^.%anki-launcher%S"
TARBALL="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux.tar.zst" TARBALL="$OUTPUT_DIR/anki-launcher.tar.zst"
tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" . tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" .

View file

@ -5,11 +5,9 @@
<key>CFBundleDisplayName</key> <key>CFBundleDisplayName</key>
<string>Anki</string> <string>Anki</string>
<key>CFBundleShortVersionString</key> <key>CFBundleShortVersionString</key>
<string>ANKI_VERSION</string> <string>1.0</string>
<key>LSMinimumSystemVersion</key> <key>LSMinimumSystemVersion</key>
<string>12</string> <string>11</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.education</string>
<key>CFBundleDocumentTypes</key> <key>CFBundleDocumentTypes</key>
<array> <array>
<dict> <dict>

View file

@ -30,33 +30,25 @@ lipo -create \
-output "$APP_LAUNCHER/Contents/MacOS/launcher" -output "$APP_LAUNCHER/Contents/MacOS/launcher"
cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/" cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"
# Build install_name_tool stub
clang -arch arm64 -o "$OUTPUT_DIR/stub_arm64" stub.c
clang -arch x86_64 -o "$OUTPUT_DIR/stub_x86_64" stub.c
lipo -create "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64" -output "$APP_LAUNCHER/Contents/MacOS/install_name_tool"
rm "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64"
# Copy support files # Copy support files
ANKI_VERSION=$(cat ../../../.version | tr -d '\n') cp Info.plist "$APP_LAUNCHER/Contents/"
sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/" cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/"
cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/" cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/"
cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/" cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/"
cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
# Codesign/bundle # Codesign
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
--entitlements entitlements.python.xml \
"$i"
done
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Notarize and bundle (skip if NODMG is set)
if [ -z "$NODMG" ]; then if [ -z "$NODMG" ]; then
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/install_name_tool" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
--entitlements entitlements.python.xml \
"$i"
done
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Notarize and build dmg
./notarize.sh "$OUTPUT_DIR" ./notarize.sh "$OUTPUT_DIR"
./dmg/build.sh "$OUTPUT_DIR" ./dmg/build.sh "$OUTPUT_DIR"
fi fi

View file

@ -6,8 +6,7 @@ set -e
# base folder with Anki.app in it # base folder with Anki.app in it
output="$1" output="$1"
dist="$1/tmp" dist="$1/tmp"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n') dmg_path="$output/Anki.dmg"
dmg_path="$output/anki-launcher-$ANKI_VERSION-mac.dmg"
if [ -d "/Volumes/Anki" ] if [ -d "/Volumes/Anki" ]
then then

View file

@ -1,6 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
int main(void) {
return 0;
}

View file

@ -1,6 +1,6 @@
[project] [project]
name = "anki-launcher" name = "anki-launcher"
version = "1.0.0" version = "0.1.0"
description = "UV-based launcher for Anki." description = "UV-based launcher for Anki."
requires-python = ">=3.9" requires-python = ">=3.9"
dependencies = [ dependencies = [

View file

@ -22,11 +22,6 @@ const NSIS_PATH: &str = "C:\\Program Files (x86)\\NSIS\\makensis.exe";
fn main() -> Result<()> { fn main() -> Result<()> {
println!("Building Windows launcher..."); println!("Building Windows launcher...");
// Read version early so it can be used throughout the build process
let version = std::fs::read_to_string("../../../.version")?
.trim()
.to_string();
let output_dir = PathBuf::from(OUTPUT_DIR); let output_dir = PathBuf::from(OUTPUT_DIR);
let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR); let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR);
let nsis_dir = PathBuf::from(NSIS_DIR); let nsis_dir = PathBuf::from(NSIS_DIR);
@ -36,20 +31,16 @@ fn main() -> Result<()> {
extract_nsis_plugins()?; extract_nsis_plugins()?;
copy_files(&output_dir)?; copy_files(&output_dir)?;
sign_binaries(&output_dir)?; sign_binaries(&output_dir)?;
copy_nsis_files(&nsis_dir, &version)?; copy_nsis_files(&nsis_dir)?;
build_uninstaller(&output_dir, &nsis_dir)?; build_uninstaller(&output_dir, &nsis_dir)?;
sign_file(&output_dir.join("uninstall.exe"))?; sign_file(&output_dir.join("uninstall.exe"))?;
generate_install_manifest(&output_dir)?; generate_install_manifest(&output_dir)?;
build_installer(&output_dir, &nsis_dir)?; build_installer(&output_dir, &nsis_dir)?;
sign_file(&PathBuf::from("../../../out/launcher_exe/anki-install.exe"))?;
let installer_filename = format!("anki-launcher-{version}-windows.exe");
let installer_path = PathBuf::from("../../../out/launcher_exe").join(&installer_filename);
sign_file(&installer_path)?;
println!("Build completed successfully!"); println!("Build completed successfully!");
println!("Output directory: {}", output_dir.display()); println!("Output directory: {}", output_dir.display());
println!("Installer: ../../../out/launcher_exe/{installer_filename}"); println!("Installer: ../../../out/launcher_exe/anki-install.exe");
Ok(()) Ok(())
} }
@ -148,9 +139,6 @@ fn copy_files(output_dir: &Path) -> Result<()> {
output_dir.join(".python-version"), output_dir.join(".python-version"),
)?; )?;
// Copy versions.py
copy_file("../versions.py", output_dir.join("versions.py"))?;
Ok(()) Ok(())
} }
@ -244,13 +232,11 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> {
Ok(()) Ok(())
} }
fn copy_nsis_files(nsis_dir: &Path, version: &str) -> Result<()> { fn copy_nsis_files(nsis_dir: &Path) -> Result<()> {
println!("Copying NSIS support files..."); println!("Copying NSIS support files...");
// Copy anki.template.nsi as anki.nsi and substitute version placeholders // Copy anki.template.nsi as anki.nsi
let template_content = std::fs::read_to_string("anki.template.nsi")?; copy_file("anki.template.nsi", nsis_dir.join("anki.nsi"))?;
let substituted_content = template_content.replace("ANKI_VERSION", version);
write_file(nsis_dir.join("anki.nsi"), substituted_content)?;
// Copy fileassoc.nsh // Copy fileassoc.nsh
copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?; copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?;

File diff suppressed because it is too large.

View file

@ -62,9 +62,8 @@ pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<
pub fn relaunch_in_terminal() -> Result<()> { pub fn relaunch_in_terminal() -> Result<()> {
let current_exe = std::env::current_exe().context("Failed to get current executable path")?; let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
Command::new("open") Command::new("open")
.args(["-na", "Terminal"]) .args(["-a", "Terminal"])
.arg(current_exe) .arg(current_exe)
.env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
.ensure_spawn()?; .ensure_spawn()?;
std::process::exit(0); std::process::exit(0);
} }

View file

@ -116,9 +116,8 @@ pub use windows::ensure_terminal_shown;
pub fn ensure_terminal_shown() -> Result<()> { pub fn ensure_terminal_shown() -> Result<()> {
use std::io::IsTerminal; use std::io::IsTerminal;
let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
if want_terminal || !stdout_is_terminal { if !stdout_is_terminal {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
mac::relaunch_in_terminal()?; mac::relaunch_in_terminal()?;
#[cfg(not(target_os = "macos"))] #[cfg(not(target_os = "macos"))]
@ -129,13 +128,3 @@ pub fn ensure_terminal_shown() -> Result<()> {
print!("\x1b]2;Anki Launcher\x07"); print!("\x1b]2;Anki Launcher\x07");
Ok(()) Ok(())
} }
pub fn ensure_os_supported() -> Result<()> {
#[cfg(all(unix, not(target_os = "macos")))]
unix::ensure_glibc_supported()?;
#[cfg(target_os = "windows")]
windows::ensure_windows_version_supported()?;
Ok(())
}

View file

@ -9,22 +9,15 @@ use anyhow::Result;
pub fn relaunch_in_terminal() -> Result<()> { pub fn relaunch_in_terminal() -> Result<()> {
let current_exe = std::env::current_exe().context("Failed to get current executable path")?; let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
// Try terminals in roughly most specific to least specific. // Try terminals in order of preference
// First, try commonly used terminals for riced systems.
// Second, try common defaults.
// Finally, try x11 compatibility terminals.
let terminals = [ let terminals = [
// commonly used for riced systems ("x-terminal-emulator", vec!["-e"]),
("gnome-terminal", vec!["--"]),
("konsole", vec!["-e"]),
("xfce4-terminal", vec!["-e"]),
("alacritty", vec!["-e"]), ("alacritty", vec!["-e"]),
("kitty", vec![]), ("kitty", vec![]),
("foot", vec![]), ("foot", vec![]),
// the user's default terminal in Debian/Ubuntu
("x-terminal-emulator", vec!["-e"]),
// default installs for the most common distros
("xfce4-terminal", vec!["-e"]),
("gnome-terminal", vec!["-e"]),
("konsole", vec!["-e"]),
// x11-compatibility terminals
("urxvt", vec!["-e"]), ("urxvt", vec!["-e"]),
("xterm", vec!["-e"]), ("xterm", vec!["-e"]),
]; ];
@@ -72,34 +65,3 @@ pub fn finalize_uninstall() {
     let mut input = String::new();
     let _ = stdin().read_line(&mut input);
 }
-
-pub fn ensure_glibc_supported() -> Result<()> {
-    use std::ffi::CStr;
-
-    let get_glibc_version = || -> Option<(u32, u32)> {
-        let version_ptr = unsafe { libc::gnu_get_libc_version() };
-        if version_ptr.is_null() {
-            return None;
-        }
-
-        let version_cstr = unsafe { CStr::from_ptr(version_ptr) };
-        let version_str = version_cstr.to_str().ok()?;
-
-        // Parse version string (format: "2.36" or "2.36.1")
-        let version_parts: Vec<&str> = version_str.split('.').collect();
-        if version_parts.len() < 2 {
-            return None;
-        }
-
-        let major: u32 = version_parts[0].parse().ok()?;
-        let minor: u32 = version_parts[1].parse().ok()?;
-        Some((major, minor))
-    };
-
-    let (major, minor) = get_glibc_version().unwrap_or_default();
-    if major < 2 || (major == 2 && minor < 36) {
-        anyhow::bail!("Anki requires a modern Linux distro with glibc 2.36 or later.");
-    }
-
-    Ok(())
-}
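
The removed gate above amounts to parsing the string returned by gnu_get_libc_version() and requiring at least 2.36. A minimal sketch of just that parse-and-compare step, with the libc call left out and hypothetical helper names:

```rust
/// Illustrative parse of a glibc version string such as "2.36" or "2.36.1".
fn parse_glibc_version(s: &str) -> Option<(u32, u32)> {
    let mut parts = s.split('.');
    let major: u32 = parts.next()?.parse().ok()?;
    let minor: u32 = parts.next()?.parse().ok()?;
    Some((major, minor))
}

/// Mirrors the removed check: require glibc 2.36 or later.
fn glibc_is_supported(s: &str) -> bool {
    match parse_glibc_version(s) {
        Some((major, minor)) => major > 2 || (major == 2 && minor >= 36),
        None => false,
    }
}

fn main() {
    assert!(glibc_is_supported("2.36"));
    assert!(glibc_is_supported("2.39.1"));
    assert!(!glibc_is_supported("2.31"));
    println!("glibc gate sketch OK");
}
```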

@@ -8,7 +8,6 @@ use anyhow::Context;
 use anyhow::Result;
 use widestring::u16cstr;
 use windows::core::PCWSTR;
-use windows::Wdk::System::SystemServices::RtlGetVersion;
 use windows::Win32::System::Console::AttachConsole;
 use windows::Win32::System::Console::GetConsoleWindow;
 use windows::Win32::System::Console::ATTACH_PARENT_PROCESS;
@@ -19,45 +18,8 @@ use windows::Win32::System::Registry::HKEY;
 use windows::Win32::System::Registry::HKEY_CURRENT_USER;
 use windows::Win32::System::Registry::KEY_READ;
 use windows::Win32::System::Registry::REG_SZ;
-use windows::Win32::System::SystemInformation::OSVERSIONINFOW;
 use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID;

-/// Returns true if running on Windows 10 (not Windows 11)
-fn is_windows_10() -> bool {
-    unsafe {
-        let mut info = OSVERSIONINFOW {
-            dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
-            ..Default::default()
-        };
-        if RtlGetVersion(&mut info).is_ok() {
-            // Windows 10 has build numbers < 22000, Windows 11 >= 22000
-            info.dwBuildNumber < 22000 && info.dwMajorVersion == 10
-        } else {
-            false
-        }
-    }
-}
-
-/// Ensures Windows 10 version 1809 or later
-pub fn ensure_windows_version_supported() -> Result<()> {
-    unsafe {
-        let mut info = OSVERSIONINFOW {
-            dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
-            ..Default::default()
-        };
-
-        if RtlGetVersion(&mut info).is_err() {
-            anyhow::bail!("Failed to get Windows version information");
-        }
-
-        if info.dwBuildNumber >= 17763 {
-            return Ok(());
-        }
-
-        anyhow::bail!("Windows 10 version 1809 or later is required.")
-    }
-}
-
 pub fn ensure_terminal_shown() -> Result<()> {
     unsafe {
         if !GetConsoleWindow().is_invalid() {
@@ -67,14 +29,6 @@ pub fn ensure_terminal_shown() -> Result<()> {
     }

     if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() {
-        // This black magic triggers Windows to switch to the new
-        // ANSI-supporting console host, which is usually only available
-        // when the app is built with the console subsystem.
-        // Only needed on Windows 10, not Windows 11.
-        if is_windows_10() {
-            let _ = Command::new("cmd").args(["/C", ""]).status();
-        }
-
         // Successfully attached to parent console
         reconnect_stdio_to_console();
         return Ok(());
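
Both removed checks above reduce to build-number comparisons: build 17763 corresponds to Windows 10 1809, and 10.x builds below 22000 are Windows 10 rather than Windows 11. A pure-logic sketch of those two gates, with hypothetical helper names and no RtlGetVersion call:

```rust
/// Windows 10 1809 corresponds to build 17763; anything newer passes.
fn build_meets_minimum(build: u32) -> bool {
    build >= 17763
}

/// Windows 11 starts at build 22000; 10.x builds below that are Windows 10.
fn looks_like_windows_10(major: u32, build: u32) -> bool {
    major == 10 && build < 22000
}

fn main() {
    assert!(!build_meets_minimum(17134)); // 1803: too old
    assert!(build_meets_minimum(19045)); // Windows 10 22H2
    assert!(looks_like_windows_10(10, 19045));
    assert!(!looks_like_windows_10(10, 22631)); // Windows 11 23H2
}
```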

@@ -1,44 +0,0 @@
-# Copyright: Ankitects Pty Ltd and contributors
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-import json
-import sys
-
-import pip_system_certs.wrapt_requests
-import requests
-
-pip_system_certs.wrapt_requests.inject_truststore()
-
-
-def main():
-    """Fetch and return all versions from PyPI, sorted by upload time."""
-    url = "https://pypi.org/pypi/aqt/json"
-
-    try:
-        response = requests.get(url, timeout=30)
-        response.raise_for_status()
-        data = response.json()
-        releases = data.get("releases", {})
-
-        # Create list of (version, upload_time) tuples
-        version_times = []
-        for version, files in releases.items():
-            if files:  # Only include versions that have files
-                # Use the upload time of the first file for each version
-                upload_time = files[0].get("upload_time_iso_8601")
-                if upload_time:
-                    version_times.append((version, upload_time))
-
-        # Sort by upload time
-        version_times.sort(key=lambda x: x[1])
-
-        # Extract just the version names
-        versions = [version for version, _ in version_times]
-
-        print(json.dumps(versions))
-    except Exception as e:
-        print(f"Error fetching versions: {e}", file=sys.stderr)
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    main()

@@ -24,7 +24,7 @@ Name "Anki"
 Unicode true

 ; The file to write (relative to nsis directory)
-OutFile "..\launcher_exe\anki-launcher-ANKI_VERSION-windows.exe"
+OutFile "..\launcher_exe\anki-install.exe"

 ; Non elevated
 RequestExecutionLevel user
@@ -214,7 +214,7 @@ Section ""
   ; Write the uninstall keys for Windows
   WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki Launcher"
-  WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "ANKI_VERSION"
+  WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "1.0.0"
   WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"'
   WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S'
   WriteRegDWORD HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1

@@ -33,12 +33,6 @@ class _MacOSHelper:
         "On completion, file should be saved if no error has arrived."
         self._dll.end_wav_record()

-    def disable_appnap(self) -> None:
-        self._dll.disable_appnap()
-
-    def enable_appnap(self) -> None:
-        self._dll.enable_appnap()
-

 # this must not be overwritten or deallocated
 @CFUNCTYPE(None, c_char_p)  # type: ignore

@@ -1,25 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-import Foundation
-
-private var currentActivity: NSObjectProtocol?
-
-@_cdecl("disable_appnap")
-public func disableAppNap() {
-    // No-op if already assigned
-    guard currentActivity == nil else { return }
-
-    currentActivity = ProcessInfo.processInfo.beginActivity(
-        options: .userInitiatedAllowingIdleSystemSleep,
-        reason: "AppNap is disabled"
-    )
-}
-
-@_cdecl("enable_appnap")
-public func enableAppNap() {
-    guard let activity = currentActivity else { return }
-
-    ProcessInfo.processInfo.endActivity(activity)
-    currentActivity = nil
-}

@@ -15,7 +15,6 @@ echo "Building macOS helper dylib..."
 # Create the wheel using uv
 echo "Creating wheel..."
 cd "$SCRIPT_DIR"
-rm -rf dist
 "$PROJ_ROOT/out/extracted/uv/uv" build --wheel

 echo "Build complete!"

@@ -1,6 +1,8 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+import os
+import platform
 import subprocess
 import sys
 from pathlib import Path

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

 [project]
 name = "anki-mac-helper"
-version = "0.1.1"
+version = "0.1.0"
 description = "Small support library for Anki on Macs"
 requires-python = ">=3.9"
 license = { text = "AGPL-3.0-or-later" }

@@ -1,14 +0,0 @@
-#!/bin/bash
-#
-# Build and install into the launcher venv
-
-set -e
-
-./build.sh
-
-if [[ "$OSTYPE" == "darwin"* ]]; then
-    export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv
-else
-    export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv
-fi
-
-../../out/extracted/uv/uv pip install dist/*.whl

@@ -12,7 +12,7 @@ dependencies = [
     "send2trash",
     "waitress>=2.0.0",
     "pywin32; sys.platform == 'win32'",
-    "anki-mac-helper>=0.1.1; sys.platform == 'darwin'",
+    "anki-mac-helper; sys.platform == 'darwin'",
     "pip-system-certs!=5.1",
     "pyqt6>=6.2",
     "pyqt6-webengine>=6.2",
@@ -37,14 +37,14 @@ qt67 = [
     "pyqt6-webengine-qt6==6.7.3",
     "pyqt6_sip==13.10.2",
 ]
-qt = [
+qt69 = [
     "pyqt6==6.9.1",
     "pyqt6-qt6==6.9.1",
-    "pyqt6-webengine==6.8.0",
-    "pyqt6-webengine-qt6==6.8.2",
+    "pyqt6-webengine==6.9.0",
+    "pyqt6-webengine-qt6==6.9.1",
     "pyqt6_sip==13.10.2",
 ]
-qt68 = [
+qt = [
     "pyqt6==6.8.0",
     "pyqt6-qt6==6.8.1",
     "pyqt6-webengine==6.8.0",
@@ -58,7 +58,7 @@ conflicts = [
     { extra = "qt" },
     { extra = "qt66" },
     { extra = "qt67" },
-    { extra = "qt68" },
+    { extra = "qt69" },
     ],
 ]
@@ -72,12 +72,9 @@ build-backend = "hatchling.build"
 [project.scripts]
 anki = "aqt:run"

-[project.gui-scripts]
-ankiw = "aqt:run"
-
 [tool.hatch.build.targets.wheel]
 packages = ["aqt"]
-exclude = ["aqt/data", "**/*.ui"]
+exclude = ["**/*.scss", "**/*.ui"]

 [tool.hatch.version]
 source = "code"

@@ -48,7 +48,6 @@ async-trait.workspace = true
 axum.workspace = true
 axum-client-ip.workspace = true
 axum-extra.workspace = true
-bitflags.workspace = true
 blake3.workspace = true
 bytes.workspace = true
 chrono.workspace = true
@@ -81,7 +80,6 @@ pin-project.workspace = true
 prost.workspace = true
 pulldown-cmark.workspace = true
 rand.workspace = true
-rayon.workspace = true
 regex.workspace = true
 reqwest.workspace = true
 rusqlite.workspace = true

@@ -22,7 +22,6 @@ inflections.workspace = true
 anki_io.workspace = true
 anyhow.workspace = true
 itertools.workspace = true
-regex.workspace = true

 [dependencies]
 fluent.workspace = true

@@ -4,5 +4,6 @@
 // Include auto-generated content
 #![allow(clippy::all)]
+#![allow(text_direction_codepoint_in_literal)]

 include!(concat!(env!("OUT_DIR"), "/strings.rs"));

@@ -195,30 +195,12 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
     .unwrap();
     for (module, contents) in modules {
-        let escaped_contents = escape_unicode_control_chars(contents);
-        writeln!(
-            buf,
-            r###" "{module}" => r##"{escaped_contents}"##,"###
-        )
-        .unwrap();
+        writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap();
     }
     buf.push_str("};\n");
 }

-fn escape_unicode_control_chars(input: &str) -> String {
-    use regex::Regex;
-
-    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
-    let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
-
-    re.replace_all(input, |caps: &regex::Captures| {
-        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
-        format!("\\u{{{:04x}}}", c as u32)
-    })
-    .into_owned()
-}
-
 fn lang_constant_name(lang: &str) -> String {
     lang.to_ascii_uppercase().replace('-', "_")
 }
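
To make the removed escaping concrete: the main-branch helper rewrites bidi control characters (U+202A through U+202E and U+2066 through U+2069) as visible \u{...} escapes before they are baked into the generated raw strings. A standalone sketch of the same substitution, assuming the regex crate as in the code above:

```rust
use regex::Regex;

fn escape_bidi_controls(input: &str) -> String {
    // Same character class as the helper above: bidi embedding/isolate controls.
    let re = Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap();
    re.replace_all(input, |caps: &regex::Captures| {
        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
        format!("\\u{{{:04x}}}", c as u32)
    })
    .into_owned()
}

fn main() {
    // A right-to-left override embedded in a translation string...
    let raw = "total\u{202e}123";
    // ...comes out as a visible escape, so it cannot flip the generated source.
    assert_eq!(escape_bidi_controls(raw), "total\\u{202e}123");
}
```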

@@ -42,14 +42,14 @@ enum CheckableUrl {
 }

 impl CheckableUrl {
-    fn url(&self) -> Cow<'_, str> {
+    fn url(&self) -> Cow<str> {
         match *self {
             Self::HelpPage(page) => help_page_to_link(page).into(),
             Self::String(s) => s.into(),
         }
     }

-    fn anchor(&self) -> Cow<'_, str> {
+    fn anchor(&self) -> Cow<str> {
         match *self {
             Self::HelpPage(page) => help_page_link_suffix(page).into(),
             Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),

@@ -11,24 +11,6 @@ use snafu::ensure;
 use snafu::ResultExt;
 use snafu::Snafu;

-#[derive(Debug)]
-pub struct CodeDisplay(Option<i32>);
-
-impl std::fmt::Display for CodeDisplay {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self.0 {
-            Some(code) => write!(f, "{code}"),
-            None => write!(f, "?"),
-        }
-    }
-}
-
-impl From<Option<i32>> for CodeDisplay {
-    fn from(code: Option<i32>) -> Self {
-        CodeDisplay(code)
-    }
-}
-
 #[derive(Debug, Snafu)]
 pub enum Error {
     #[snafu(display("Failed to execute: {cmdline}"))]
@@ -36,15 +18,8 @@ pub enum Error {
         cmdline: String,
         source: std::io::Error,
     },
-    #[snafu(display("Failed to run ({code}): {cmdline}"))]
-    ReturnedError { cmdline: String, code: CodeDisplay },
-    #[snafu(display("Failed to run ({code}): {cmdline}: {stdout}{stderr}"))]
-    ReturnedWithOutputError {
-        cmdline: String,
-        code: CodeDisplay,
-        stdout: String,
-        stderr: String,
-    },
+    #[snafu(display("Failed with code {code:?}: {cmdline}"))]
+    ReturnedError { cmdline: String, code: Option<i32> },
     #[snafu(display("Couldn't decode stdout/stderr as utf8"))]
     InvalidUtf8 {
         cmdline: String,
@@ -96,36 +71,31 @@ impl CommandExt for Command {
             status.success(),
             ReturnedSnafu {
                 cmdline: get_cmdline(self),
-                code: CodeDisplay::from(status.code()),
+                code: status.code(),
             }
         );
         Ok(self)
     }

     fn utf8_output(&mut self) -> Result<Utf8Output> {
-        let cmdline = get_cmdline(self);
         let output = self.output().with_context(|_| DidNotExecuteSnafu {
-            cmdline: cmdline.clone(),
+            cmdline: get_cmdline(self),
         })?;
-        let stdout = String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
-            cmdline: cmdline.clone(),
-        })?;
-        let stderr = String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
-            cmdline: cmdline.clone(),
-        })?;
         ensure!(
             output.status.success(),
-            ReturnedWithOutputSnafu {
-                cmdline,
-                code: CodeDisplay::from(output.status.code()),
-                stdout: stdout.clone(),
-                stderr: stderr.clone(),
+            ReturnedSnafu {
+                cmdline: get_cmdline(self),
+                code: output.status.code(),
             }
         );
-
-        Ok(Utf8Output { stdout, stderr })
+        Ok(Utf8Output {
+            stdout: String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
+                cmdline: get_cmdline(self),
+            })?,
+            stderr: String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
+                cmdline: get_cmdline(self),
+            })?,
+        })
     }

     fn ensure_spawn(&mut self) -> Result<std::process::Child> {
@@ -165,10 +135,7 @@ mod test {
         #[cfg(not(windows))]
         assert!(matches!(
             Command::new("false").ensure_success(),
-            Err(Error::ReturnedError {
-                code: CodeDisplay(_),
-                ..
-            })
+            Err(Error::ReturnedError { code: Some(1), .. })
         ));
     }
 }
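
For context on the error-type change: on main, a missing exit status (for example a process killed by a signal) renders as "?" via CodeDisplay, rather than as Some(..)/None debug output. A self-contained re-creation of that small type, purely for illustration:

```rust
use std::fmt;

/// Re-creation of the CodeDisplay idea from the hunk above: show an exit
/// code when there is one, and "?" when the process died without one.
struct CodeDisplay(Option<i32>);

impl fmt::Display for CodeDisplay {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.0 {
            Some(code) => write!(f, "{code}"),
            None => write!(f, "?"),
        }
    }
}

fn main() {
    assert_eq!(CodeDisplay(Some(1)).to_string(), "1");
    assert_eq!(CodeDisplay(None).to_string(), "?"); // e.g. killed by a signal
    println!("Failed to run ({}): false", CodeDisplay(Some(1)));
}
```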

@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
 }

 impl Backend {
-    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
+    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_some()
@@ -102,7 +102,7 @@ impl Backend {
             .ok_or(AnkiError::CollectionNotOpen)
     }

-    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
+    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_none()

@@ -39,7 +39,6 @@ impl From<BoolKeyProto> for BoolKey {
             BoolKeyProto::RenderLatex => BoolKey::RenderLatex,
             BoolKeyProto::LoadBalancerEnabled => BoolKey::LoadBalancerEnabled,
             BoolKeyProto::FsrsShortTermWithStepsEnabled => BoolKey::FsrsShortTermWithStepsEnabled,
-            BoolKeyProto::FsrsLegacyEvaluate => BoolKey::FsrsLegacyEvaluate,
         }
     }
 }

@@ -105,8 +105,7 @@ impl Card {
     /// Returns true if the card has a due date in terms of days.
     fn is_due_in_days(&self) -> bool {
-        self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
-            || matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
+        matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
             || (self.ctype == CardType::Review && self.is_undue_queue())
     }
@@ -126,20 +125,20 @@
         }
     }

-    /// If last_review_date isn't stored in the card, this uses card.due and
-    /// card.ivl to infer the elapsed time, which won't be accurate if
-    /// 'set due date' or an add-on has changed the due date.
-    pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
-        if let Some(last_review_time) = self.last_review_time {
-            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
-        } else if self.is_due_in_days() {
+    /// This uses card.due and card.ivl to infer the elapsed time. If 'set due
+    /// date' or an add-on has changed the due date, this won't be accurate.
+    pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
+        if !self.is_due_in_days() {
+            Some(
+                (timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
+                    / 86_400,
+            )
+        } else {
             self.due_time(timing).map(|due| {
                 (due.adding_secs(-86_400 * self.interval as i64)
-                    .elapsed_secs()) as u32
+                    .elapsed_secs()
+                    / 86_400) as u32
             })
-        } else {
-            let last_review_time = TimestampSecs(self.original_or_current_due() as i64);
-            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
         }
     }
 }
@@ -542,12 +541,12 @@ impl RowContext {
         self.cards[0]
             .memory_state
             .as_ref()
-            .zip(self.cards[0].seconds_since_last_review(&self.timing))
+            .zip(self.cards[0].days_since_last_review(&self.timing))
             .zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
-            .map(|((state, seconds), decay)| {
-                let r = FSRS::new(None).unwrap().current_retrievability_seconds(
+            .map(|((state, days_elapsed), decay)| {
+                let r = FSRS::new(None).unwrap().current_retrievability(
                     (*state).into(),
-                    seconds,
+                    days_elapsed,
                     decay,
                 );
                 format!("{:.0}%", r * 100.)
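
The last two hunks swap a day-granularity elapsed-time estimate for a second-granularity one, but the fallback inference is the same in both: treat the last review as having happened `interval` days before the due time. A self-contained sketch of that arithmetic with plain Unix timestamps (names are illustrative; the real code goes through TimestampSecs and SchedTimingToday):

```rust
const SECS_PER_DAY: i64 = 86_400;

/// Infer seconds since the last review from a due timestamp and an interval
/// in days, as the fallback path does when no stored review time is available.
fn inferred_secs_since_last_review(now: i64, due: i64, interval_days: i64) -> i64 {
    let assumed_last_review = due - interval_days * SECS_PER_DAY;
    now - assumed_last_review
}

fn main() {
    let now = 1_700_000_000;
    // Card due tomorrow with a 10-day interval: last review roughly 9 days ago.
    let due = now + SECS_PER_DAY;
    let elapsed = inferred_secs_since_last_review(now, due, 10);
    assert_eq!(elapsed / SECS_PER_DAY, 9);
    // main keeps the value in seconds, while 25.06b7 floored it to whole days,
    // which matters for retrievability of cards reviewed earlier the same day.
    println!("elapsed days: {}", elapsed / SECS_PER_DAY);
}
```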

@@ -96,7 +96,6 @@ pub struct Card {
     pub(crate) memory_state: Option<FsrsMemoryState>,
     pub(crate) desired_retention: Option<f32>,
     pub(crate) decay: Option<f32>,
-    pub(crate) last_review_time: Option<TimestampSecs>,
     /// JSON object or empty; exposed through the reviewer for persisting custom
     /// state
     pub(crate) custom_data: String,
@@ -148,7 +147,6 @@ impl Default for Card {
             memory_state: None,
             desired_retention: None,
             decay: None,
-            last_review_time: None,
             custom_data: String::new(),
         }
     }

Some files were not shown because too many files have changed in this diff.