Mirror of https://github.com/ankitects/anki.git (synced 2025-09-24 16:56:36 -04:00)

Compare commits: 204 commits
Commits (SHA1):
04a0b10a15  99c67d39cb  0d31c6de4a  fb332c4fe1  48f774c711  3890e12c9e
80cff16250  75d9026be5  6854d13b88  29072654db  ec6f09958a  c2957746f4
9e415869b8  7e8a1076c1  b97fb45e06  61094d387a  90ed4cc115  4506ad0c97
539054c34d  cf12c201d8  3b0297d14d  58deb14028  5c4d2e87a1  6d31776c25
dda730dfa2  08431106da  b4b1c2013f  5280cb2f1c  b2ab0c0830  6a985c9fb0
db1d04f622  2491eb0316  06f9d41a96  8d5c385c76  153b972dfd  4ac80061ca
01b825f7c6  157da4c7a7  8ef208e418  65ea013270  ef1a1deb9c  c93e11f343
e3d0a30443  4fdb4983dd  3521da3ad6  ca60911e19  71ec878780  6dd9daf074
3b33d20849  542c557404  211cbfe660  359231a4d8  d23764b59e  1dc31bb360
6fa33777db  2fee6f959b  3d0a408a2b  3e846c8756  79932aad41  2879dc63c3
b92eabf4ae  1660a22548  a3b3b0850d  562cef1f22  e676e1a484  37f7872565
5c07c899ec  054740dd14  78a3b3ef7b  f3b4284afb  fb2e2bd37a  a0c1a398f4
d4862e99da  34ed674869  8c7cd80245  68bc4c02cf  f4266f0142  d3e8dc6dbf
5462d99255  2d60471f36  62e01fe03a  5c6e2188e2  ab55440a05  aae9f53e79
a77ffbf4a5  402008950c  f7e6e9cb0d  2b55882cce  0d0c42c6d9  b76918a217
f7974568c9  d13c117e80  8932199513  69d54864a8  baeccfa3e4  e99682a277
4dc00556c1  3dc6b6b3ca  c947690aeb  1af3c58d40  46bcf4efa6  60750f8e4c
661f78557f  7172b2d266  78c6db2023  e2692b5ac9  177c483398  20b7bb66db
ca0459d8ee  e511d63b7e  d6e49f8ea5  416e7af02b  c74a97a5fa  00bc0354c9
aee71afebe  ef69f424c1  19f9afba64  229337dbe0  1f3d03f7f8  47c1094195
35a889e1ed  65b5aefd07  8c72b03f4c  fc845a11a9  aeaf001df7  a3da224511
63ddd0e183  278a84f8d2  0b30155c90  37fe704326  e77cd791de  4e29440d6a
cc4b0a825e  15bbcdd568  12635f4cd2  834fb41015  5a19027185  0375b4aac0
a1934ae9e4  58a8aa7353  4604bc7567  3b18097550  c56fd3ee28  f4e587256c
51cf09daf3  dfbb7302e8  1f7f7bc8a3  208729fa3e  6744a0a31a  1ad82ea8b5
1098d9ac2a  778ab76586  c57b7c496d  fabed12f4b  84658e9cec  11c3e60615
3d9fbfd97f  80ff9a120c  037dfa1bc1  3adcf05ca6  3bd725b6be  0009e798e1
436a1d78bc  2e74101ca4  7fe201d6bd  8a3b72e6e5  d3e1fd1f80  3d6b4761e4
1ca31413f7  b205008a5e  b16439fc9c  f927aa5788  a83a6b5928  052b9231ec
1b51c16e26  de2d1477de  ccc0c7cdbb  deaf25f757  93dbd6e4cf  7b0289b5d3
08a8b6691c  fc6447a938  d1793550b0  cedece5cae  2594dcb2bb  09495d3a8b
f5285f359a  fba1d7b4b0  4232185735  0b5218706a  bb1b289690  e81a7e8b1a
da90705346  9e1690774c  ee5e8c9230  b6c70f7b75  944e453419  14eb297bbf
a07370f565  bf36e10519  b22b3310d6  7720c7de1a  0be87b887e  bce3cabf9b
211 changed files with 8082 additions and 6627 deletions

@@ -10,3 +10,6 @@ PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows

[term]
color = "always"

[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
.version (2 changes)
@@ -1 +1 @@
25.06b7
25.09.2
@@ -1 +1,2 @@
nodeLinker: node-modules
enableScripts: false
CONTRIBUTORS (11 changes)
@@ -49,6 +49,7 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org>
kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt>
@@ -233,6 +234,16 @@ Spiritual Father <https://github.com/spiritualfather>
Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>

********************

The text of the 3 clause BSD license follows:
Cargo.lock (generated, 1114 changes): file diff suppressed because it is too large
Cargo.toml (11 changes)
@@ -33,9 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

[workspace.dependencies.fsrs]
version = "4.1.1"
version = "5.1.0"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"

[workspace.dependencies]
@@ -52,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes

# normal
ammonia = "4.1.0"
ammonia = "4.1.2"
anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6"
@@ -60,6 +59,7 @@ async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] }
bitflags = "2.9.1"
blake3 = "1.8.2"
bytes = "1.10.1"
camino = "1.1.10"
@@ -109,6 +109,7 @@ prost-types = "0.13"
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@@ -132,7 +133,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
@@ -140,7 +141,7 @@ walkdir = "2.5.0"
which = "8.0.0"
widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
@@ -1,4 +1,4 @@
# Anki
# Anki®

[](https://buildkite.com/ankitects/anki-ci)
@@ -1,8 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::env;

use anyhow::Result;
use ninja_gen::action::BuildAction;
use ninja_gen::archives::Platform;
@@ -125,7 +123,14 @@ impl BuildAction for BuildWheel {
    }

    fn files(&mut self, build: &mut impl FilesHandle) {
        build.add_inputs("uv", inputs![":uv_binary"]);
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            let uv_path =
                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
            build.add_inputs("uv", inputs![uv_path]);
        } else {
            build.add_inputs("uv", inputs![":uv_binary"]);
        }

        build.add_inputs("", &self.deps);

        // Set the project directory based on which package we're building
@@ -222,15 +227,19 @@ struct Sphinx {

impl BuildAction for Sphinx {
    fn command(&self) -> &str {
        if env::var("OFFLINE_BUILD").is_err() {
            "$uv sync --extra sphinx && $python python/sphinx/build.py"
        } else {
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            "$python python/sphinx/build.py"
        } else {
            "$uv sync --extra sphinx && $python python/sphinx/build.py"
        }
    }

    fn files(&mut self, build: &mut impl FilesHandle) {
        if env::var("OFFLINE_BUILD").is_err() {
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            let uv_path =
                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
            build.add_inputs("uv", inputs![uv_path]);
        } else {
            build.add_inputs("uv", inputs![":uv_binary"]);
            // Set environment variable to use the existing pyenv
            build.add_variable("pyenv_path", "$builddir/pyenv");
@@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {

pub fn check_rust(build: &mut Build) -> Result<()> {
    let inputs = inputs![
        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
        "Cargo.lock",
        "Cargo.toml",
        "rust-toolchain.toml",
@@ -35,3 +35,7 @@ path = "src/bin/update_uv.rs"

[[bin]]
name = "update_protoc"
path = "src/bin/update_protoc.rs"

[[bin]]
name = "update_node"
path = "src/bin/update_node.rs"
@@ -49,6 +49,46 @@ pub trait BuildAction {
    }

    fn name(&self) -> &'static str {
        std::any::type_name::<Self>().split("::").last().unwrap()
        std::any::type_name::<Self>()
            .split("::")
            .last()
            .unwrap()
            .split('<')
            .next()
            .unwrap()
    }
}

#[cfg(test)]
trait TestBuildAction {}

#[cfg(test)]
impl<T: TestBuildAction + ?Sized> BuildAction for T {
    fn command(&self) -> &str {
        "test"
    }
    fn files(&mut self, _build: &mut impl FilesHandle) {}
}

#[allow(dead_code, unused_variables)]
#[test]
fn should_strip_regions_in_type_name() {
    struct Bare;
    impl TestBuildAction for Bare {}
    assert_eq!(Bare {}.name(), "Bare");

    struct WithLifeTime<'a>(&'a str);
    impl TestBuildAction for WithLifeTime<'_> {}
    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");

    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
    assert_eq!(
        WithMultiLifeTime("test", "test").name(),
        "WithMultiLifeTime"
    );

    struct WithGeneric<T>(T);
    impl<T> TestBuildAction for WithGeneric<T> {}
    assert_eq!(WithGeneric(3).name(), "WithGeneric");
}
@@ -67,7 +67,7 @@ impl Platform {
}

/// Append .exe to path if on Windows.
pub fn with_exe(path: &str) -> Cow<str> {
pub fn with_exe(path: &str) -> Cow<'_, str> {
    if cfg!(windows) {
        format!("{path}.exe").into()
    } else {
build/ninja_gen/src/bin/update_node.rs (new file, 268 lines)
@@ -0,0 +1,268 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::error::Error;
use std::fs;
use std::path::Path;

use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;

#[derive(Debug)]
struct NodeRelease {
    version: String,
    files: Vec<NodeFile>,
}

#[derive(Debug)]
struct NodeFile {
    filename: String,
    url: String,
}

fn main() -> Result<(), Box<dyn Error>> {
    let release_info = fetch_node_release_info()?;
    let new_text = generate_node_archive_function(&release_info)?;
    update_node_text(&new_text)?;
    println!("Node.js archive function updated successfully!");
    Ok(())
}

fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
    let client = Client::new();

    // Get the Node.js release info
    let response = client
        .get("https://nodejs.org/dist/index.json")
        .header("User-Agent", "anki-build-updater")
        .send()?;

    let releases: Vec<Value> = response.json()?;

    // Find the latest LTS release
    let latest = releases
        .iter()
        .find(|release| {
            // LTS releases have a non-false "lts" field
            release["lts"].as_str().is_some() && release["lts"] != false
        })
        .ok_or("No LTS releases found")?;

    let version = latest["version"]
        .as_str()
        .ok_or("Version not found")?
        .to_string();

    let files = latest["files"]
        .as_array()
        .ok_or("Files array not found")?
        .iter()
        .map(|f| f.as_str().unwrap_or(""))
        .collect::<Vec<_>>();

    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
    println!("Found Node.js LTS version: {version} ({lts_name})");

    // Map platforms to their expected file keys and full filenames
    let platform_mapping = vec![
        (
            "linux-x64",
            "linux-x64",
            format!("node-{version}-linux-x64.tar.xz"),
        ),
        (
            "linux-arm64",
            "linux-arm64",
            format!("node-{version}-linux-arm64.tar.xz"),
        ),
        (
            "darwin-x64",
            "osx-x64-tar",
            format!("node-{version}-darwin-x64.tar.xz"),
        ),
        (
            "darwin-arm64",
            "osx-arm64-tar",
            format!("node-{version}-darwin-arm64.tar.xz"),
        ),
        (
            "win-x64",
            "win-x64-zip",
            format!("node-{version}-win-x64.zip"),
        ),
        (
            "win-arm64",
            "win-arm64-zip",
            format!("node-{version}-win-arm64.zip"),
        ),
    ];

    let mut node_files = Vec::new();

    for (platform, file_key, filename) in platform_mapping {
        // Check if this file exists in the release
        if files.contains(&file_key) {
            let url = format!("https://nodejs.org/dist/{version}/{filename}");
            node_files.push(NodeFile {
                filename: filename.clone(),
                url,
            });
            println!("Found file for {platform}: {filename} (key: {file_key})");
        } else {
            return Err(
                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
            );
        }
    }

    Ok(NodeRelease {
        version,
        files: node_files,
    })
}

fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    // Fetch the SHASUMS256.txt file once
    println!("Fetching SHA256 checksums...");
    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
    let shasums_response = client
        .get(&shasums_url)
        .header("User-Agent", "anki-build-updater")
        .send()?;
    let shasums_text = shasums_response.text()?;

    // Create a mapping from filename patterns to platform names - using the exact
    // patterns we stored in files
    let platform_mapping = vec![
        ("linux-x64.tar.xz", "LinuxX64"),
        ("linux-arm64.tar.xz", "LinuxArm"),
        ("darwin-x64.tar.xz", "MacX64"),
        ("darwin-arm64.tar.xz", "MacArm"),
        ("win-x64.zip", "WindowsX64"),
        ("win-arm64.zip", "WindowsArm"),
    ];

    let mut platform_blocks = Vec::new();

    for (file_pattern, platform_name) in platform_mapping {
        // Find the file that ends with this pattern
        if let Some(file) = release
            .files
            .iter()
            .find(|f| f.filename.ends_with(file_pattern))
        {
            // Find the SHA256 for this file
            let sha256 = shasums_text
                .lines()
                .find(|line| line.contains(&file.filename))
                .and_then(|line| line.split_whitespace().next())
                .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;

            println!(
                "Found SHA256 for {}: {} => {}",
                platform_name, file.filename, sha256
            );

            let block = format!(
                " Platform::{} => OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }},",
                platform_name, file.url, sha256
            );
            platform_blocks.push(block);
        } else {
            return Err(format!(
                "File not found for platform {platform_name}: no file ending with {file_pattern}"
            )
            .into());
        }
    }

    let function = format!(
        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}",
        platform_blocks.join("\n")
    );

    Ok(function)
}

fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
    let node_rs_content = read_node_rs()?;

    // Regex to match the entire node_archive function with proper multiline
    // matching
    let re = Regex::new(
        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
    )?;

    let updated_content = re.replace(&node_rs_content, new_function);

    write_node_rs(&updated_content)?;
    Ok(())
}

fn read_node_rs() -> Result<String, Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    Ok(fs::read_to_string(path)?)
}

fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    fs::write(path, content)?;
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_regex_replacement() {
        let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
            sha256: "old_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
            sha256: "old_hash",
        },
    }
}

More code here"#;

        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
            sha256: "new_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
            sha256: "new_hash",
        },
    }
}"#;

        let re = Regex::new(
            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
        ).unwrap();

        let result = re.replace(sample_content, new_function);
        assert!(result.contains("v21.0.0"));
        assert!(result.contains("new_hash"));
        assert!(!result.contains("old_hash"));
        assert!(result.contains("Some other code"));
        assert!(result.contains("More code here"));
    }
}
@@ -19,28 +19,28 @@ use crate::input::BuildInput;
pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
            sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
            sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
        },
        Platform::LinuxArm => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
            sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
            sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
            sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
            sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
        },
        Platform::MacArm => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
            sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
            sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
        },
        Platform::WindowsX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
            sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
            sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
        },
        Platform::WindowsArm => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip",
            sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142",
            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
            sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
        },
    }
}
@@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
    }
}

fn with_cmd_ext(bin: &str) -> Cow<str> {
fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
    if cfg!(windows) {
        format!("{bin}.cmd").into()
    } else {
@@ -32,10 +32,19 @@ pub fn setup_pyenv(args: PyenvArgs) {
        }
    }

    let mut command = Command::new(args.uv_bin);

    // remove UV_* environment variables to avoid interference
    for (key, _) in std::env::vars() {
        if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
            command.env_remove(key);
        }
    }

    run_command(
        Command::new(args.uv_bin)
        command
            .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
            .args(["sync", "--locked"])
            .args(["sync", "--locked", "--no-config"])
            .args(args.extra_args),
    );
@@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) {
                .arg("--ignore-scripts"),
        );
    } else {
        run_command(Command::new(&args.yarn_bin).arg("install"));
        run_command(
            Command::new(&args.yarn_bin)
                .arg("install")
                .arg("--immutable"),
        );
    }

    std::fs::write(args.stamp, b"").unwrap();
cargo/licenses.json (5960 changes): file diff suppressed because it is too large
@@ -1,35 +1,78 @@
# This Dockerfile uses three stages.
# 1. Compile anki (and dependencies) and build python wheels.
# 2. Create a virtual environment containing anki and its dependencies.
# 3. Create a final image that only includes anki's virtual environment and required
#    system packages.
# This is a user-contributed Dockerfile. No official support is available.

ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive"

# Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
    > /usr/local/bin/bazel \
    && chmod +x /usr/local/bin/bazel \
    # Bazel expects /usr/bin/python
    && ln -s /usr/local/bin/python /usr/bin/python
FROM ubuntu:24.04 AS build
WORKDIR /opt/anki
COPY . .
# Build python wheels.
ENV PYTHON_VERSION="3.13"

# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    build-essential \
    pkg-config \
    libssl-dev \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    libffi-dev \
    zlib1g-dev \
    liblzma-dev \
    ca-certificates \
    ninja-build \
    rsync \
    libglib2.0-0 \
    libgl1 \
    libx11-6 \
    libxext6 \
    libxrender1 \
    libxkbcommon0 \
    libxkbcommon-x11-0 \
    libxcb1 \
    libxcb-render0 \
    libxcb-shm0 \
    libxcb-icccm4 \
    libxcb-image0 \
    libxcb-keysyms1 \
    libxcb-randr0 \
    libxcb-shape0 \
    libxcb-xfixes0 \
    libxcb-xinerama0 \
    libxcb-xinput0 \
    libsm6 \
    libice6 \
    && rm -rf /var/lib/apt/lists/*

# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
    && ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"

RUN uv python install ${PYTHON_VERSION} --default

COPY . .

RUN ./tools/build

# Install pre-compiled Anki.
FROM python:${PYTHON_VERSION}-slim as installer
FROM python:3.13-slim AS installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/wheels/ wheels/
COPY --from=build /opt/anki/out/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
    && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
    && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl

# We use another build stage here so we don't include the wheels in the final image.
FROM python:${PYTHON_VERSION}-slim as final
FROM python:3.13-slim AS final
COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.
@@ -59,9 +102,9 @@ RUN apt-get update \
    libxrender1 \
    libxtst6 \
    && rm -rf /var/lib/apt/lists/*

# Add non-root user.
RUN useradd --create-home anki
USER anki
WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
@@ -1 +1 @@
Subproject commit 4a65d6012ac022a35f5c80c80b2b665447b6a525
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
@@ -5,6 +5,11 @@ database-check-card-properties =
        [one] Fixed { $count } invalid card property.
       *[other] Fixed { $count } invalid card properties.
    }
database-check-card-last-review-time-empty =
    { $count ->
        [one] Added last review time to { $count } card.
       *[other] Added last review time to { $count } cards.
    }
database-check-missing-templates =
    { $count ->
        [one] Deleted { $count } card with missing template.
@@ -384,8 +384,6 @@ deck-config-which-deck = Which deck would you like to display options for?
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews =
    { $count ->
        [one] Only { $count } review was found.
@@ -394,7 +392,6 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-optimize-button = Optimize Current Preset
# Indicates that a given function or label, provided via the "text" variable, operates slowly.
deck-config-slow-suffix = { $text } (slow)
@@ -407,7 +404,6 @@ deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change
@@ -426,7 +422,7 @@ deck-config-desired-retention-tooltip =
    values will greatly increase your workload, and lower values can be demoralizing when you forget
    a lot of material.
deck-config-desired-retention-tooltip2 =
    The workload values provided by the tooltip are a rough approximation. For a greater level of accuracy, use the simulator.
    The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip =
    When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
    assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@@ -468,12 +464,7 @@ deck-config-compute-optimal-weights-tooltip2 =
    By default, parameters will be calculated from the review history of all decks using the current preset. You can
    optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
    optimizing the parameters.
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.

deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-workload-factor-change = Approximate workload: {$factor}x
    (compared to {$previousDR}% desired retention)
@@ -505,7 +496,10 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation
@@ -514,10 +508,14 @@ deck-config-advanced-settings = Advanced Settings
deck-config-smooth-graph = Smooth graph
deck-config-suspend-leeches = Suspend leeches
deck-config-save-options-to-preset = Save Changes to Preset
deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
# Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card

## Messages related to the FSRS scheduler’s health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.

@@ -527,7 +525,7 @@ deck-config-health-check = Check health when optimizing
deck-config-fsrs-bad-fit-warning = Health Check:
    Your memory is difficult for FSRS to predict. Recommendations:

    - Suspend or reformulate leeches.
    - Suspend or reformulate any cards you constantly forget.
    - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
    - Understand before you memorize.

@@ -538,6 +536,17 @@ deck-config-fsrs-good-fit = Health Check:

## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
    { $days ->
        [one] A 100 day interval will become { $days } day.
@@ -48,6 +48,7 @@ importing-merge-notetypes-help =
    Warning: This will require a one-way sync, and may mark existing notes as modified.
importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
importing-new-deck-will-be-created = A new deck will be created: { $name }
importing-notes-added-from-file = Notes added from file: { $val }
importing-notes-found-in-file = Notes found in file: { $val }
importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }
@@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text
preferences-default-search-text-example = eg. 'deck:current '
preferences-default-search-text-example = e.g. "deck:current"
preferences-theme = Theme
preferences-theme-follow-system = Follow System
preferences-theme-light = Light
@@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} memorized
statistics-memorized = {$memorized} cards memorized
statistics-today-title = Today
statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards

## True Retention represents your actual retention rate from past reviews, in
## comparison to the "desired retention" parameter of FSRS, which forecasts
## future retention. True Retention is the percentage of all reviewed cards
## Retention represents your actual retention from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.
@@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.

statistics-true-retention-title = True Retention
statistics-true-retention-title = Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail
@@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
    { $cards ->
        [one] { $cards } card
       *[other] { $cards } cards
    } studied in
    { $minutes ->
        [one] { $minutes } minute.
       *[other] { $minutes } minutes.
    }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed

## OBSOLETE; you do not need to translate this

studying-card-studied-in =
    { $count ->
        [one] { $count } card studied in
@@ -56,5 +70,3 @@ studying-minute =
        [one] { $count } minute.
       *[other] { $count } minutes.
    }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
@@ -1 +1 @@
Subproject commit f42461a6438cbe844150f543128d79a669bc4ef2
Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
package.json (22 changes)
@@ -19,8 +19,8 @@
        "@poppanator/sveltekit-svg": "^5.0.0",
        "@sqltools/formatter": "^1.2.2",
        "@sveltejs/adapter-static": "^3.0.0",
        "@sveltejs/kit": "^2.20.7",
        "@sveltejs/vite-plugin-svelte": "4.0.0",
        "@sveltejs/kit": "^2.22.2",
        "@sveltejs/vite-plugin-svelte": "5.1",
        "@types/bootstrap": "^5.0.12",
        "@types/codemirror": "^5.60.0",
        "@types/d3": "^7.0.0",
@@ -30,7 +30,7 @@
        "@types/jqueryui": "^1.12.13",
        "@types/lodash-es": "^4.17.4",
        "@types/marked": "^5.0.0",
        "@types/node": "^20",
        "@types/node": "^22",
        "@typescript-eslint/eslint-plugin": "^5.60.1",
        "@typescript-eslint/parser": "^5.60.1",
        "caniuse-lite": "^1.0.30001431",
@@ -48,16 +48,16 @@
        "prettier": "^3.4.2",
        "prettier-plugin-svelte": "^3.3.2",
        "sass": "<1.77",
        "svelte": "^5.17.3",
        "svelte-check": "^3.4.4",
        "svelte-preprocess": "^5.0.4",
        "svelte": "^5.34.9",
        "svelte-check": "^4.2.2",
        "svelte-preprocess": "^6.0.3",
        "svelte-preprocess-esbuild": "^3.0.1",
        "svgo": "^3.2.0",
        "tslib": "^2.0.3",
        "tsx": "^3.12.0",
        "tsx": "^4.8.1",
        "typescript": "^5.0.4",
        "vite": "5.4.19",
        "vitest": "^2"
        "vite": "6",
        "vitest": "^3"
    },
    "dependencies": {
        "@bufbuild/protobuf": "^1.2.1",
@@ -81,7 +81,9 @@
    },
    "resolutions": {
        "canvas": "npm:empty-npm-package@1.0.0",
        "cookie": "0.7.0"
        "cookie": "0.7.0",
        "devalue": "^5.3.2",
        "vite": "6"
    },
    "browserslist": [
        "defaults",
@@ -51,6 +51,7 @@ message Card {
  optional FsrsMemoryState memory_state = 20;
  optional float desired_retention = 21;
  optional float decay = 22;
  optional int64 last_review_time_secs = 23;
  string custom_data = 19;
}
@@ -56,6 +56,7 @@ message ConfigKey {
    RENDER_LATEX = 25;
    LOAD_BALANCER_ENABLED = 26;
    FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
    FSRS_LEGACY_EVALUATE = 28;
  }
  enum String {
    SET_DUE_BROWSER = 0;
@@ -40,12 +40,10 @@ message DeckConfigId {
message GetRetentionWorkloadRequest {
  repeated float w = 1;
  string search = 2;
  float before = 3;
  float after = 4;
}

message GetRetentionWorkloadResponse {
  float factor = 1;
  map<uint32, float> costs = 1;
}

message GetIgnoredBeforeCountRequest {
@@ -219,6 +217,8 @@ message DeckConfigsForUpdate {
    bool review_today_active = 5;
    // Whether new_today applies to today or a past day.
    bool new_today_active = 6;
    // Deck-specific desired retention override
    optional float desired_retention = 7;
  }
  string name = 1;
  int64 config_id = 2;
@@ -236,6 +236,7 @@ message DeckConfigsForUpdate {
    bool new_cards_ignore_review_limit = 7;
    bool fsrs = 8;
    bool fsrs_health_check = 11;
    bool fsrs_legacy_evaluate = 12;
    bool apply_all_parent_limits = 9;
    uint32 days_since_last_fsrs_optimize = 10;
  }
@@ -83,6 +83,8 @@ message Deck {
    optional uint32 new_limit = 7;
    DayLimit review_limit_today = 8;
    DayLimit new_limit_today = 9;
    // Deck-specific desired retention override
    optional float desired_retention = 10;

    reserved 12 to 15;
  }
@@ -27,6 +27,9 @@ service FrontendService {
  rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
  // Warns python that the deck option web view is ready to receive requests.
  rpc deckOptionsReady(generic.Empty) returns (generic.Empty);

  // Save colour picker's custom colour palette
  rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
}

service BackendFrontendService {}
@@ -176,9 +176,12 @@ message CsvMetadata {
  // to determine the number of columns.
  repeated string column_labels = 5;
  oneof deck {
    // id of an existing deck
    int64 deck_id = 6;
    // One-based. 0 means n/a.
    uint32 deck_column = 7;
    // name of new deck to be created
    string deck_name = 17;
  }
  oneof notetype {
    // One notetype for all rows with given column mapping.
@@ -59,7 +59,7 @@ message AddNoteRequest {
}

message AddNoteResponse {
  collection.OpChanges changes = 1;
  collection.OpChangesWithCount changes = 1;
  int64 note_id = 2;
}
@@ -55,7 +55,11 @@ service SchedulerService {
      returns (ComputeOptimalRetentionResponse);
  rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
      returns (SimulateFsrsReviewResponse);
  rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
      returns (SimulateFsrsWorkloadResponse);
  rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
  rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
      returns (EvaluateParamsResponse);
  rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
  // The number of days the calculated interval was fuzzed by on the previous
  // review (if any). Utilized by the FSRS add-on.
@@ -402,31 +406,9 @@ message SimulateFsrsReviewRequest {
  repeated float easy_days_percentages = 10;
  deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
  optional uint32 suspend_after_lapse_count = 12;
  // For CMRR
  message CMRRTarget {
    message Memorized {
      float loss_aversion = 1;
    };

    message Stability {};

    message FutureMemorized {
      int32 days = 1;
    };

    message AverageFutureMemorized {
      int32 days = 1;
    };

    oneof kind {
      Memorized memorized = 1;
      Stability stability = 2;
      FutureMemorized future_memorized = 3;
      AverageFutureMemorized average_future_memorized = 4;
    };
  };

  optional CMRRTarget target = 13;
  float historical_retention = 13;
  uint32 learning_step_count = 14;
  uint32 relearning_step_count = 15;
}

message SimulateFsrsReviewResponse {
@@ -436,6 +418,12 @@ message SimulateFsrsReviewResponse {
  repeated float daily_time_cost = 4;
}

message SimulateFsrsWorkloadResponse {
  map<uint32, float> cost = 1;
  map<uint32, float> memorized = 2;
  map<uint32, uint32> review_count = 3;
}

message ComputeOptimalRetentionResponse {
  float optimal_retention = 1;
}
@@ -467,6 +455,12 @@ message EvaluateParamsRequest {
  uint32 num_of_relearning_steps = 3;
}

message EvaluateParamsLegacyRequest {
  repeated float params = 1;
  string search = 2;
  int64 ignore_revlogs_before_ms = 3;
}

message EvaluateParamsResponse {
  float log_loss = 1;
  float rmse_bins = 2;
@@ -74,10 +74,15 @@ message SearchNode {
    repeated SearchNode nodes = 1;
    Joiner joiner = 2;
  }
  enum FieldSearchMode {
    FIELD_SEARCH_MODE_NORMAL = 0;
    FIELD_SEARCH_MODE_REGEX = 1;
    FIELD_SEARCH_MODE_NOCOMBINING = 2;
  }
  message Field {
    string field_name = 1;
    string text = 2;
    bool is_re = 3;
    FieldSearchMode mode = 3;
  }

  oneof filter {
@@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
        return BackendError(err.message, help_page, context, backtrace)

    elif val == kind.SEARCH_ERROR:
        return SearchError(markdown(err.message), help_page, context, backtrace)
        return SearchError(err.message, help_page, context, backtrace)

    elif val == kind.UNDO_EMPTY:
        return UndoEmpty(err.message, help_page, context, backtrace)
@@ -49,6 +49,7 @@ class Card(DeprecatedNamesMixin):
    memory_state: FSRSMemoryState | None
    desired_retention: float | None
    decay: float | None
    last_review_time: int | None

    def __init__(
        self,
@@ -103,6 +104,11 @@ class Card(DeprecatedNamesMixin):
            card.desired_retention if card.HasField("desired_retention") else None
        )
        self.decay = card.decay if card.HasField("decay") else None
        self.last_review_time = (
            card.last_review_time_secs
            if card.HasField("last_review_time_secs")
            else None
        )

    def _to_backend_card(self) -> cards_pb2.Card:
        # mtime & usn are set by backend
@@ -127,6 +133,7 @@ class Card(DeprecatedNamesMixin):
            memory_state=self.memory_state,
            desired_retention=self.desired_retention,
            decay=self.decay,
            last_review_time_secs=self.last_review_time,
        )

    @deprecated(info="please use col.update_card()")
@@ -528,7 +528,7 @@ class Collection(DeprecatedNamesMixin):
    def new_note(self, notetype: NotetypeDict) -> Note:
        return Note(self, notetype)

    def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
    def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount:
        hooks.note_will_be_added(self, note, deck_id)
        out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
        note.id = NoteId(out.note_id)
|
|||
def fact_view(self) -> type[MnemoFactView]:
|
||||
try:
|
||||
fact_view = self.cards[0].fact_view_id
|
||||
except IndexError as err:
|
||||
raise Exception(f"Fact {id} has no cards") from err
|
||||
except IndexError:
|
||||
return FrontOnly
|
||||
|
||||
if fact_view.startswith("1.") or fact_view.startswith("1::"):
|
||||
return FrontOnly
|
||||
|
@ -187,7 +187,7 @@ class MnemoFact:
|
|||
elif fact_view.startswith("5.1"):
|
||||
return Cloze
|
||||
|
||||
raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
|
||||
raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")
|
||||
|
||||
def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
|
||||
return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]
|
||||
|
|
|
@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
|
|||
TR = anki._fluent.LegacyTranslationEnum
|
||||
FormatTimeSpan = _pb.FormatTimespanRequest
|
||||
|
||||
|
||||
# When adding new languages here, check lang_to_disk_lang() below
|
||||
langs = sorted(
|
||||
[
|
||||
("Afrikaans", "af_ZA"),
|
||||
|
@ -38,6 +38,7 @@ langs = sorted(
|
|||
("Italiano", "it_IT"),
|
||||
("lo jbobau", "jbo_EN"),
|
||||
("Lenga d'òc", "oc_FR"),
|
||||
("Қазақша", "kk_KZ"),
|
||||
("Magyar", "hu_HU"),
|
||||
("Nederlands", "nl_NL"),
|
||||
("Norsk", "nb_NO"),
|
||||
|
@ -64,6 +65,7 @@ langs = sorted(
|
|||
("Українська мова", "uk_UA"),
|
||||
("Հայերեն", "hy_AM"),
|
||||
("עִבְרִית", "he_IL"),
|
||||
("ייִדיש", "yi"),
|
||||
("العربية", "ar_SA"),
|
||||
("فارسی", "fa_IR"),
|
||||
("ภาษาไทย", "th_TH"),
|
||||
|
@ -73,6 +75,7 @@ langs = sorted(
|
|||
("ଓଡ଼ିଆ", "or_OR"),
|
||||
("Filipino", "tl"),
|
||||
("ئۇيغۇر", "ug"),
|
||||
("Oʻzbekcha", "uz_UZ"),
|
||||
]
|
||||
)
|
||||
|
||||
|
@ -103,6 +106,7 @@ compatMap = {
|
|||
"it": "it_IT",
|
||||
"ja": "ja_JP",
|
||||
"jbo": "jbo_EN",
|
||||
"kk": "kk_KZ",
|
||||
"ko": "ko_KR",
|
||||
"la": "la_LA",
|
||||
"mn": "mn_MN",
|
||||
|
@ -123,7 +127,9 @@ compatMap = {
|
|||
"th": "th_TH",
|
||||
"tr": "tr_TR",
|
||||
"uk": "uk_UA",
|
||||
"uz": "uz_UZ",
|
||||
"vi": "vi_VN",
|
||||
"yi": "yi",
|
||||
}
|
||||
|
||||
|
||||
|
@ -231,7 +237,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
|
|||
|
||||
|
||||
def is_rtl(lang: str) -> bool:
|
||||
return lang in ("he", "ar", "fa", "ug")
|
||||
return lang in ("he", "ar", "fa", "ug", "yi")
|
||||
|
||||
|
||||
# strip off unicode isolation markers from a translated string
|
||||
|
|
|
@@ -7,7 +7,7 @@ dependencies = [
    "decorator",
    "markdown",
    "orjson",
    "protobuf>=4.21",
    "protobuf>=6.0,<8.0",
    "requests[socks]",
    # remove after we update to min python 3.11+
    "typing_extensions",
@@ -66,13 +66,14 @@ def show(mw: aqt.AnkiQt) -> QDialog:
    # WebView contents
    ######################################################################
    abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>"
    abouttext += f"<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}"
    lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®")
    abouttext += f"<p>{lede}"
    abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
    abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
    abouttext += ("Python %s Qt %s PyQt %s<br>") % (
    abouttext += ("Python %s Qt %s Chromium %s<br>") % (
        platform.python_version(),
        qVersion(),
        PYQT_VERSION_STR,
        (qWebEngineChromiumVersion() or "").split(".")[0],
    )
    abouttext += (
        without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite))
@@ -223,6 +224,9 @@ def show(mw: aqt.AnkiQt) -> QDialog:
            "Mukunda Madhav Dey",
            "Adnane Taghi",
            "Anon_0000",
            "Bilolbek Normuminov",
            "Sagiv Marzini",
            "Zhanibek Rassululy",
        )
    )
@@ -8,7 +8,7 @@ from collections.abc import Callable
import aqt.editor
import aqt.forms
from anki._legacy import deprecated
from anki.collection import OpChanges, SearchNode
from anki.collection import OpChanges, OpChangesWithCount, SearchNode
from anki.decks import DeckId
from anki.models import NotetypeId
from anki.notes import Note, NoteFieldsCheckResult, NoteId
@@ -294,13 +294,13 @@ class AddCards(QMainWindow):

        target_deck_id = self.deck_chooser.selected_deck_id

        def on_success(changes: OpChanges) -> None:
        def on_success(changes: OpChangesWithCount) -> None:
            # only used for detecting changed sticky fields on close
            self._last_added_note = note

            self.addHistory(note)

            tooltip(tr.adding_added(), period=500)
            tooltip(tr.importing_cards_added(count=changes.count), period=500)
            av_player.stop_and_clear_queue()
            self._load_new_note(sticky_fields_from=note)
            gui_hooks.add_cards_did_add_note(note)

@@ -10,6 +10,8 @@ import re
from collections.abc import Callable, Sequence
from typing import Any, cast

from markdown import markdown

import aqt
import aqt.browser
import aqt.editor
@@ -20,7 +22,7 @@ from anki.cards import Card, CardId
from anki.collection import Collection, Config, OpChanges, SearchNode
from anki.consts import *
from anki.decks import DeckId
from anki.errors import NotFoundError
from anki.errors import NotFoundError, SearchError
from anki.lang import without_unicode_isolation
from anki.models import NotetypeId
from anki.notes import NoteId
@@ -498,6 +500,8 @@ class Browser(QMainWindow):
        text = self.current_search()
        try:
            normed = self.col.build_search_string(text)
        except SearchError as err:
            showWarning(markdown(str(err)))
        except Exception as err:
            showWarning(str(err))
        else:
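Illustrative sketch (not part of the diff above): the hunk lets search errors surface separately from other failures, and renders their Markdown before display. A minimal, hedged version of that pattern with a hypothetical report() helper:

    from anki.errors import SearchError

    def normalise_or_report(col, text: str) -> str | None:
        try:
            return col.build_search_string(text)
        except SearchError as err:
            # SearchError messages may contain Markdown markup
            report(str(err))  # report() is a hypothetical helper, not Anki API
            return None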
@@ -51,6 +51,7 @@ class CardInfoDialog(QDialog):

    def _setup_ui(self, card_id: CardId | None) -> None:
        self.mw.garbage_collect_on_dialog_finish(self)
        self.setMinimumSize(400, 300)
        disable_help_button(self)
        restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800))
        add_close_shortcut(self)
@@ -13,7 +13,7 @@ import aqt.browser
from anki.cards import Card
from anki.collection import Config
from anki.tags import MARKED_TAG
from aqt import AnkiQt, gui_hooks
from aqt import AnkiQt, gui_hooks, is_mac
from aqt.qt import (
    QCheckBox,
    QDialog,
@@ -81,10 +81,15 @@ class Previewer(QDialog):
        qconnect(self.finished, self._on_finished)
        self.silentlyClose = True
        self.vbox = QVBoxLayout()
        spacing = 6
        self.vbox.setContentsMargins(0, 0, 0, 0)
        self.vbox.setSpacing(spacing)
        self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
        self.vbox.addWidget(self._web)
        self.bbox = QDialogButtonBox()
        self.bbox.setContentsMargins(
            spacing, spacing if is_mac else 0, spacing, spacing
        )
        self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)

        gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)
A binary icon image (727 B) was removed, and qt/aqt/data/qt/icons/media-record.svg was added as a new file (27 lines):
@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
    xmlns="http://www.w3.org/2000/svg">
  <g id="Layer-1" transform="translate(0.5,0.5)">
    <rect x="0" y="0" width="20" height="20" fill="none"/>
    <g transform="translate(14.8974,6.3648)">
      <path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
        fill="black" fill-rule="nonzero"/>
    </g>
    <g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
      <ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
        fill="none" stroke="black" stroke-width="0.25"/>
    </g>
    <g transform="translate(3.1987,14.4958)">
      <path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
        fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
    </g>
    <g transform="matrix(-1,0,0,1,20.573,18.613)">
      <rect x="5.387" y="0.601" width="9.799" height="0.185"
        fill="none" stroke="black" stroke-width="2"/>
    </g>
    <g transform="matrix(-1,0,0,1,20.741,13.51)">
      <rect x="9.899" y="1.163" width="0.943" height="4.164"
        fill="none" stroke="black" stroke-width="2"/>
    </g>
  </g>
</svg>
@@ -151,6 +151,7 @@ class Editor:
        self.add_webview()
        self.setupWeb()
        self.setupShortcuts()
        self.setupColourPalette()
        gui_hooks.editor_did_init(self)

    # Initial setup
@@ -349,6 +350,14 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
            keys, fn, _ = row
            QShortcut(QKeySequence(keys), self.widget, activated=fn)  # type: ignore

    def setupColourPalette(self) -> None:
        if not (colors := self.mw.col.get_config("customColorPickerPalette")):
            return
        for i, colour in enumerate(colors[: QColorDialog.customCount()]):
            if not QColor.isValidColorName(colour):
                continue
            QColorDialog.setCustomColor(i, QColor.fromString(colour))

    def _addFocusCheck(self, fn: Callable) -> Callable:
        def checkFocus() -> None:
            if self.currentField is None:
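Illustrative sketch (not part of the diff above): the new setupColourPalette reads a list of colour names from collection config and seeds QColorDialog's custom slots. The same pattern outside Anki, assuming PyQt6:

    from PyQt6.QtGui import QColor
    from PyQt6.QtWidgets import QColorDialog

    def load_palette(colours: list[str]) -> None:
        # only as many slots as the dialog exposes (usually 16)
        for i, name in enumerate(colours[: QColorDialog.customCount()]):
            if QColor.isValidColorName(name):
                QColorDialog.setCustomColor(i, QColor.fromString(name))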
@@ -23,25 +23,36 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr
if TYPE_CHECKING:
    from aqt.main import AnkiQt

# so we can be non-modal/non-blocking, without Python deallocating the message
# box ahead of time
_mbox: QMessageBox | None = None


def show_exception(*, parent: QWidget, exception: Exception) -> None:
    "Present a caught exception to the user using a pop-up."
    if isinstance(exception, Interrupted):
        # nothing to do
        return
    global _mbox
    error_lines = []
    help_page = HelpPage.TROUBLESHOOTING
    if isinstance(exception, BackendError):
        if exception.context:
            print(exception.context)
            error_lines.append(exception.context)
        if exception.backtrace:
            print(exception.backtrace)
        showWarning(str(exception), parent=parent, help=exception.help_page)
            error_lines.append(exception.backtrace)
        if exception.help_page is not None:
            help_page = exception.help_page
    else:
        # if the error is not originating from the backend, dump
        # a traceback to the console to aid in debugging
        traceback.print_exception(
            None, exception, exception.__traceback__, file=sys.stdout
        error_lines = traceback.format_exception(
            None, exception, exception.__traceback__
        )
        showWarning(str(exception), parent=parent)
    error_text = "\n".join(error_lines)
    print(error_lines)
    _mbox = _init_message_box(str(exception), error_text, help_page)
    _mbox.show()


def is_chromium_cert_error(error: str) -> bool:
@@ -158,9 +169,39 @@ if not os.environ.get("DEBUG"):
    sys.excepthook = excepthook

# so we can be non-modal/non-blocking, without Python deallocating the message
# box ahead of time
_mbox: QMessageBox | None = None

def _init_message_box(
    user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING
):
    global _mbox

    _mbox = QMessageBox()
    _mbox.setWindowTitle("Anki")
    _mbox.setText(user_text)
    _mbox.setIcon(QMessageBox.Icon.Warning)
    _mbox.setTextFormat(Qt.TextFormat.PlainText)

    def show_help():
        openHelp(help_page)

    def copy_debug_info():
        QApplication.clipboard().setText(debug_text)
        tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)

    help = _mbox.addButton(QMessageBox.StandardButton.Help)
    if debug_text:
        debug_info = _mbox.addButton(
            tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
        )
        debug_info.disconnect()
        debug_info.clicked.connect(copy_debug_info)
    cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
    cancel.setText(tr.actions_close())

    help.disconnect()
    help.clicked.connect(show_help)

    return _mbox


class ErrorHandler(QObject):
@@ -252,33 +293,7 @@ class ErrorHandler:
            user_text += "\n\n" + self._addonText(error)
            debug_text += addon_debug_info()

        def show_troubleshooting():
            openHelp(HelpPage.TROUBLESHOOTING)

        def copy_debug_info():
            QApplication.clipboard().setText(debug_text)
            tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)

        global _mbox
        _mbox = QMessageBox()
        _mbox.setWindowTitle("Anki")
        _mbox.setText(user_text)
        _mbox.setIcon(QMessageBox.Icon.Warning)
        _mbox.setTextFormat(Qt.TextFormat.PlainText)

        troubleshooting = _mbox.addButton(
            tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole
        )
        debug_info = _mbox.addButton(
            tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
        )
        cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
        cancel.setText(tr.actions_close())

        troubleshooting.disconnect()
        troubleshooting.clicked.connect(show_troubleshooting)
        debug_info.disconnect()
        debug_info.clicked.connect(copy_debug_info)
        _mbox = _init_message_box(user_text, debug_text)

        if self.fatal_error_encountered:
            _mbox.exec()
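Illustrative sketch (not part of the diff above): both code paths keep the QMessageBox in a module-level _mbox so that a non-modal .show() dialog is not garbage-collected while it is still on screen. The bare pattern, assuming PyQt6:

    from PyQt6.QtWidgets import QMessageBox

    _box: QMessageBox | None = None  # module-level reference keeps the dialog alive

    def show_nonmodal(text: str) -> None:
        global _box
        _box = QMessageBox()
        _box.setText(text)
        _box.show()  # non-blocking; without the global, Python could collect the box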
@@ -1292,9 +1292,10 @@
  <tabstop>daily_backups</tabstop>
  <tabstop>weekly_backups</tabstop>
  <tabstop>monthly_backups</tabstop>
  <tabstop>tabWidget</tabstop>
  <tabstop>syncAnkiHubLogout</tabstop>
  <tabstop>syncAnkiHubLogin</tabstop>
  <tabstop>buttonBox</tabstop>
  <tabstop>tabWidget</tabstop>
 </tabstops>
 <resources/>
 <connections>
@@ -1309,7 +1309,7 @@ title="{}" {}>{}</button>""".format(
        if not askUser(tr.qt_misc_open_anki_launcher()):
            return

        from aqt.update import update_and_restart
        from aqt.package import update_and_restart

        update_and_restart()

@@ -1394,7 +1394,7 @@ title="{}" {}>{}</button>""".format(
    ##########################################################################

    def setupMenus(self) -> None:
        from aqt.update import have_launcher
        from aqt.package import launcher_executable

        m = self.form

@@ -1426,7 +1426,7 @@ title="{}" {}>{}</button>""".format(
        qconnect(m.actionEmptyCards.triggered, self.onEmptyCards)
        qconnect(m.actionNoteTypes.triggered, self.onNoteTypes)
        qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade)
        if not have_launcher():
        if not launcher_executable():
            m.action_upgrade_downgrade.setVisible(False)
        qconnect(m.actionPreferences.triggered, self.onPrefs)
|
|
@@ -170,13 +170,42 @@ def favicon() -> Response:

def _mime_for_path(path: str) -> str:
    "Mime type for provided path/filename."
    if path.endswith(".css"):
        # some users may have invalid mime type in the Windows registry
        return "text/css"
    elif path.endswith(".js") or path.endswith(".mjs"):
        return "application/javascript"

    _, ext = os.path.splitext(path)
    ext = ext.lower()

    # Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely
    # break Anki's UI. So we hard-code the most common extensions.
    mime_types = {
        ".css": "text/css",
        ".js": "application/javascript",
        ".mjs": "application/javascript",
        ".html": "text/html",
        ".htm": "text/html",
        ".svg": "image/svg+xml",
        ".png": "image/png",
        ".jpg": "image/jpeg",
        ".jpeg": "image/jpeg",
        ".gif": "image/gif",
        ".webp": "image/webp",
        ".ico": "image/x-icon",
        ".json": "application/json",
        ".woff": "font/woff",
        ".woff2": "font/woff2",
        ".ttf": "font/ttf",
        ".otf": "font/otf",
        ".mp3": "audio/mpeg",
        ".mp4": "video/mp4",
        ".webm": "video/webm",
        ".ogg": "audio/ogg",
        ".pdf": "application/pdf",
        ".txt": "text/plain",
    }

    if mime := mime_types.get(ext):
        return mime
    else:
        # autodetect
        # fallback to mimetypes, which may consult the registry
        mime, _encoding = mimetypes.guess_type(path)
        return mime or "application/octet-stream"
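Illustrative sketch (not part of the diff above): the new lookup prefers the hard-coded table and only then falls back to mimetypes, which on Windows may consult the registry. The same order with a trimmed table:

    import mimetypes
    import os

    _KNOWN = {".css": "text/css", ".js": "application/javascript", ".svg": "image/svg+xml"}

    def mime_for(path: str) -> str:
        ext = os.path.splitext(path)[1].lower()
        # known extensions win; otherwise guess, otherwise a generic binary type
        return _KNOWN.get(ext) or mimetypes.guess_type(path)[0] or "application/octet-stream"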
|
||||
|
@@ -483,7 +512,7 @@ def update_deck_configs() -> bytes:
        update.abort = True

    def on_success(changes: OpChanges) -> None:
        if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
        if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog):
            window.reject()

    def handle_on_main() -> None:
@@ -511,7 +540,7 @@ def set_scheduling_states() -> bytes:

def import_done() -> bytes:
    def update_window_modality() -> None:
        if window := aqt.mw.app.activeWindow():
        if window := aqt.mw.app.activeModalWidget():
            from aqt.import_export.import_dialog import ImportDialog

            if isinstance(window, ImportDialog):
@@ -529,7 +558,7 @@ def import_request(endpoint: str) -> bytes:
    response.ParseFromString(output)

    def handle_on_main() -> None:
        window = aqt.mw.app.activeWindow()
        window = aqt.mw.app.activeModalWidget()
        on_op_finished(aqt.mw, response, window)

    aqt.mw.taskman.run_on_main(handle_on_main)
@@ -569,7 +598,7 @@ def change_notetype() -> bytes:
    data = request.data

    def handle_on_main() -> None:
        window = aqt.mw.app.activeWindow()
        window = aqt.mw.app.activeModalWidget()
        if isinstance(window, ChangeNotetypeDialog):
            window.save(data)

@@ -579,7 +608,7 @@ def change_notetype() -> bytes:

def deck_options_require_close() -> bytes:
    def handle_on_main() -> None:
        window = aqt.mw.app.activeWindow()
        window = aqt.mw.app.activeModalWidget()
        if isinstance(window, DeckOptionsDialog):
            window.require_close()

@@ -591,7 +620,7 @@ def deck_options_require_close() -> bytes:

def deck_options_ready() -> bytes:
    def handle_on_main() -> None:
        window = aqt.mw.app.activeWindow()
        window = aqt.mw.app.activeModalWidget()
        if isinstance(window, DeckOptionsDialog):
            window.set_ready()

@@ -599,6 +628,15 @@ def deck_options_ready() -> bytes:
    return b""


def save_custom_colours() -> bytes:
    colors = [
        QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb)
        for i in range(QColorDialog.customCount())
    ]
    aqt.mw.col.set_config("customColorPickerPalette", colors)
    return b""


post_handler_list = [
    congrats_info,
    get_deck_configs_for_update,
@@ -614,6 +652,7 @@ post_handler_list = [
    search_in_browser,
    deck_options_require_close,
    deck_options_ready,
    save_custom_colours,
]


@@ -651,9 +690,10 @@ exposed_backend_list = [
    "compute_fsrs_params",
    "compute_optimal_retention",
    "set_wants_abort",
    "evaluate_params",
    "evaluate_params_legacy",
    "get_optimal_retention_parameters",
    "simulate_fsrs_review",
    "simulate_fsrs_workload",
    # DeckConfigService
    "get_ignored_before_count",
    "get_retention_workload",
@@ -18,7 +18,7 @@ def add_note(
    parent: QWidget,
    note: Note,
    target_deck_id: DeckId,
) -> CollectionOp[OpChanges]:
) -> CollectionOp[OpChangesWithCount]:
    return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id))

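Illustrative sketch (not part of the diff above): because add_note's CollectionOp now carries OpChangesWithCount, a success callback can read the count, as the AddCards hunk earlier does. A hedged example of wiring such a callback (the callback body is illustrative, not Anki code):

    def on_added(changes) -> None:  # changes: OpChangesWithCount
        print(f"added {changes.count} card(s)")

    # add_note(parent=widget, note=note, target_deck_id=deck_id).success(on_added).run_in_background()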
@@ -5,10 +5,13 @@

from __future__ import annotations

import contextlib
import os
import subprocess
import sys
from pathlib import Path

from anki.utils import is_mac
from anki.utils import is_mac, is_win


# ruff: noqa: F401
@@ -65,3 +68,106 @@ def first_run_setup() -> None:
    # Wait for both commands to complete
    for proc in processes:
        proc.wait()


def uv_binary() -> str | None:
    """Return the path to the uv binary."""
    return os.environ.get("ANKI_LAUNCHER_UV")


def launcher_root() -> str | None:
    """Return the path to the launcher root directory (AnkiProgramFiles)."""
    return os.environ.get("UV_PROJECT")


def venv_binary(cmd: str) -> str | None:
    """Return the path to a binary in the launcher's venv."""
    root = launcher_root()
    if not root:
        return None

    root_path = Path(root)
    if is_win:
        binary_path = root_path / ".venv" / "Scripts" / cmd
    else:
        binary_path = root_path / ".venv" / "bin" / cmd

    return str(binary_path)


def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
    """Add Python requirements to the launcher venv using uv add.

    Returns (success, output)"""

    binary = uv_binary()
    if not binary:
        return (False, "Not in packaged build.")

    uv_cmd = [binary, "add"] + reqs
    result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)

    if result.returncode == 0:
        root = launcher_root()
        if root:
            sync_marker = Path(root) / ".sync_complete"
            sync_marker.touch()

        return (True, result.stdout)
    else:
        return (False, result.stderr)


def launcher_executable() -> str | None:
    """Return the path to the Anki launcher executable."""
    return os.getenv("ANKI_LAUNCHER")


def trigger_launcher_run() -> None:
    """Create a trigger file to request launcher UI on next run."""
    try:
        root = launcher_root()
        if not root:
            return

        trigger_path = Path(root) / ".want-launcher"
        trigger_path.touch()
    except Exception as e:
        print(e)


def update_and_restart() -> None:
    """Update and restart Anki using the launcher."""
    from aqt import mw

    launcher = launcher_executable()
    assert launcher

    trigger_launcher_run()

    with contextlib.suppress(ResourceWarning):
        env = os.environ.copy()
        env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
        # fixes a bug where launcher fails to appear if opening it
        # straight after updating
        if "GNOME_TERMINAL_SCREEN" in env:
            del env["GNOME_TERMINAL_SCREEN"]
        creationflags = 0
        if sys.platform == "win32":
            creationflags = (
                subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
            )
        # On Windows 10, changing the handles breaks ANSI display
        io = None if sys.platform == "win32" else subprocess.DEVNULL

        subprocess.Popen(
            [launcher],
            start_new_session=True,
            stdin=io,
            stdout=io,
            stderr=io,
            env=env,
            creationflags=creationflags,
        )

    mw.app.quit()
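Illustrative sketch (not part of the diff above): with these helpers, an add-on running under the launcher can locate the managed venv and ask uv to install extra wheels. A hedged usage example (the package name is hypothetical):

    from aqt.package import add_python_requirements, venv_binary

    if venv_binary("python"):  # only meaningful in a launcher-managed install
        ok, output = add_python_requirements(["example-package==1.0"])  # hypothetical requirement
        if not ok:
            print("uv add failed:", output)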
@@ -82,11 +82,14 @@ class Preferences(QDialog):
        )
        group = self.form.preferences_answer_keys
        group.setLayout(layout := QFormLayout())
        tab_widget: QWidget = self.form.url_schemes
        for ease, label in ease_labels:
            layout.addRow(
                label,
                line_edit := QLineEdit(self.mw.pm.get_answer_key(ease) or ""),
            )
            QWidget.setTabOrder(tab_widget, line_edit)
            tab_widget = line_edit
            qconnect(
                line_edit.textChanged,
                functools.partial(self.mw.pm.set_answer_key, ease),
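Illustrative sketch (not part of the diff above): each generated QLineEdit is chained into the tab order after the previous widget. The generic pattern, assuming PyQt6:

    from PyQt6.QtWidgets import QLineEdit, QWidget

    def chain_tab_order(first: QWidget, edits: list[QLineEdit]) -> None:
        prev = first
        for edit in edits:
            QWidget.setTabOrder(prev, edit)  # focus moves prev -> edit
            prev = edit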
@@ -17,6 +17,7 @@ import aqt.browser
import aqt.operations
from anki.cards import Card, CardId
from anki.collection import Config, OpChanges, OpChangesWithCount
from anki.lang import with_collapsed_whitespace
from anki.scheduler.base import ScheduleCardsAsNew
from anki.scheduler.v3 import (
    CardAnswer,
@@ -966,11 +967,15 @@ timerStopped = false;
        elapsed = self.mw.col.timeboxReached()
        if elapsed:
            assert not isinstance(elapsed, bool)
            part1 = tr.studying_card_studied_in(count=elapsed[1])
            mins = int(round(elapsed[0] / 60))
            part2 = tr.studying_minute(count=mins)
            cards_val = elapsed[1]
            minutes_val = int(round(elapsed[0] / 60))
            message = with_collapsed_whitespace(
                tr.studying_card_studied_in_minute(
                    cards=cards_val, minutes=str(minutes_val)
                )
            )
            fin = tr.studying_finish()
            diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
            diag = askUserDialog(message, [tr.studying_continue(), fin])
            diag.setIcon(QMessageBox.Icon.Information)
            if diag.run() == fin:
                self.mw.moveToState("deckBrowser")
||||
|
|
|
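Illustrative sketch (not part of the diff above): the timebox prompt now feeds both numbers into a single translatable string rather than gluing two translations together. The value computation it relies on, in isolation:

    def timebox_values(elapsed: tuple[float, int]) -> tuple[int, int]:
        # elapsed = (seconds studied, cards studied), per the code above
        seconds, cards = elapsed
        return cards, int(round(seconds / 60))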
@ -32,6 +32,7 @@ from aqt._macos_helper import macos_helper
|
|||
from aqt.mpv import MPV, MPVBase, MPVCommandError
|
||||
from aqt.qt import *
|
||||
from aqt.taskman import TaskManager
|
||||
from aqt.theme import theme_manager
|
||||
from aqt.utils import (
|
||||
disable_help_button,
|
||||
restoreGeom,
|
||||
|
@ -630,18 +631,44 @@ class QtAudioInputRecorder(Recorder):
|
|||
self.mw = mw
|
||||
self._parent = parent
|
||||
|
||||
from PyQt6.QtMultimedia import QAudioFormat, QAudioSource # type: ignore
|
||||
from PyQt6.QtMultimedia import QAudioSource, QMediaDevices # type: ignore
|
||||
|
||||
format = QAudioFormat()
|
||||
format.setChannelCount(1)
|
||||
format.setSampleRate(44100)
|
||||
format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
|
||||
# Get the default audio input device
|
||||
device = QMediaDevices.defaultAudioInput()
|
||||
|
||||
source = QAudioSource(format, parent)
|
||||
# Try to use Int16 format first (avoids conversion)
|
||||
preferred_format = device.preferredFormat()
|
||||
int16_format = preferred_format
|
||||
int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
|
||||
|
||||
if device.isFormatSupported(int16_format):
|
||||
# Use Int16 if supported
|
||||
format = int16_format
|
||||
else:
|
||||
# Fall back to device's preferred format
|
||||
format = preferred_format
|
||||
|
||||
# Create the audio source with the chosen format
|
||||
source = QAudioSource(device, format, parent)
|
||||
|
||||
# Store the actual format being used
|
||||
self._format = source.format()
|
||||
self._audio_input = source
|
||||
|
||||
def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
|
||||
"""Convert float32 audio samples to int16 format for WAV output."""
|
||||
import struct
|
||||
|
||||
float_count = len(float_buffer) // 4 # 4 bytes per float32
|
||||
floats = struct.unpack(f"{float_count}f", float_buffer)
|
||||
|
||||
# Convert to int16 range, clipping and scaling in one step
|
||||
int16_samples = [
|
||||
max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
|
||||
]
|
||||
|
||||
return struct.pack(f"{len(int16_samples)}h", *int16_samples)
|
||||
|
||||
def start(self, on_done: Callable[[], None]) -> None:
|
||||
self._iodevice = self._audio_input.start()
|
||||
self._buffer = bytearray()
|
||||
|
@ -664,18 +691,32 @@ class QtAudioInputRecorder(Recorder):
|
|||
return
|
||||
|
||||
def write_file() -> None:
|
||||
# swallow the first 300ms to allow audio device to quiesce
|
||||
wait = int(44100 * self.STARTUP_DELAY)
|
||||
if len(self._buffer) <= wait:
|
||||
return
|
||||
self._buffer = self._buffer[wait:]
|
||||
from PyQt6.QtMultimedia import QAudioFormat
|
||||
|
||||
# write out the wave file
|
||||
# swallow the first 300ms to allow audio device to quiesce
|
||||
bytes_per_frame = self._format.bytesPerFrame()
|
||||
frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY)
|
||||
bytes_to_skip = frames_to_skip * bytes_per_frame
|
||||
|
||||
if len(self._buffer) <= bytes_to_skip:
|
||||
return
|
||||
self._buffer = self._buffer[bytes_to_skip:]
|
||||
|
||||
# Check if we need to convert float samples to int16
|
||||
if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
|
||||
audio_data = self._convert_float_to_int16(self._buffer)
|
||||
sample_width = 2 # int16 is 2 bytes
|
||||
else:
|
||||
# For integer formats, use the data as-is
|
||||
audio_data = bytes(self._buffer)
|
||||
sample_width = self._format.bytesPerSample()
|
||||
|
||||
# write out the wave file with the correct format parameters
|
||||
wf = wave.open(self.output_path, "wb")
|
||||
wf.setnchannels(self._format.channelCount())
|
||||
wf.setsampwidth(2)
|
||||
wf.setsampwidth(sample_width)
|
||||
wf.setframerate(self._format.sampleRate())
|
||||
wf.writeframes(self._buffer)
|
||||
wf.writeframes(audio_data)
|
||||
wf.close()
|
||||
|
||||
def and_then(fut: Future) -> None:
|
||||
|
@@ -743,7 +784,8 @@ class RecordDialog(QDialog):
    def _setup_dialog(self) -> None:
        self.setWindowTitle("Anki")
        icon = QLabel()
        icon.setPixmap(QPixmap("icons:media-record.png"))
        qicon = theme_manager.icon_from_resources("icons:media-record.svg")
        icon.setPixmap(qicon.pixmap(60, 60))
        self.label = QLabel("...")
        hbox = QHBoxLayout()
        hbox.addWidget(icon)
@@ -177,9 +177,13 @@ class CustomStyles:
            QPushButton:default {{
                border: 1px solid {tm.var(colors.BORDER_FOCUS)};
            }}
            QPushButton:focus {{
            QPushButton {{
                margin: 1px;
            }}
            QPushButton:focus, QPushButton:default:hover {{
                border: 2px solid {tm.var(colors.BORDER_FOCUS)};
                outline: none;
                margin: 0px;
            }}
            QPushButton:hover,
            QTabBar::tab:hover,
@@ -195,9 +199,6 @@ class CustomStyles:
                )
            };
            }}
            QPushButton:default:hover {{
                border-width: 2px;
            }}
            QPushButton:pressed,
            QPushButton:checked,
            QSpinBox::up-button:pressed,
@@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
    elif isinstance(err, Interrupted):
        # no message to show
        return
    show_warning(str(err))
    show_warning(str(err), parent=mw)


def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
    if out.new_endpoint:
        mw.pm.set_current_sync_url(out.new_endpoint)
    if out.server_message:
        showText(out.server_message)
        showText(out.server_message, parent=mw)
    if out.required == out.NO_CHANGES:
        tooltip(parent=mw, msg=tr.sync_collection_complete())
        # all done; track media progress
@@ -115,7 +115,7 @@ class ThemeManager:
        # Workaround for Qt bug. First attempt was percent-escaping the chars,
        # but Qt can't handle that.
        # https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
        path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
        path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path)
        return path

    def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:
@@ -3,16 +3,17 @@

from __future__ import annotations

import contextlib
import os
import subprocess
from pathlib import Path

import aqt
from anki.buildinfo import buildhash
from anki.collection import CheckForUpdateResponse, Collection
from anki.utils import dev_mode, int_time, int_version, is_mac, is_win, plat_desc
from anki.utils import dev_mode, int_time, int_version, plat_desc
from aqt.operations import QueryOp
from aqt.package import (
    launcher_executable as _launcher_executable,
)
from aqt.package import (
    update_and_restart as _update_and_restart,
)
from aqt.qt import *
from aqt.utils import openLink, show_warning, showText, tr
||||
|
@ -84,67 +85,7 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None:
|
|||
# ignore this update
|
||||
mw.pm.meta["suppressUpdate"] = ver
|
||||
elif ret == QMessageBox.StandardButton.Yes:
|
||||
if have_launcher():
|
||||
update_and_restart()
|
||||
if _launcher_executable():
|
||||
_update_and_restart()
|
||||
else:
|
||||
openLink(aqt.appWebsiteDownloadSection)
|
||||
|
||||
|
||||
def _anki_launcher_path() -> str | None:
|
||||
return os.getenv("ANKI_LAUNCHER")
|
||||
|
||||
|
||||
def have_launcher() -> bool:
|
||||
return _anki_launcher_path() is not None
|
||||
|
||||
|
||||
def update_and_restart() -> None:
|
||||
from aqt import mw
|
||||
|
||||
launcher = _anki_launcher_path()
|
||||
assert launcher
|
||||
|
||||
_trigger_launcher_run()
|
||||
|
||||
with contextlib.suppress(ResourceWarning):
|
||||
env = os.environ.copy()
|
||||
creationflags = 0
|
||||
if sys.platform == "win32":
|
||||
creationflags = (
|
||||
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
|
||||
)
|
||||
subprocess.Popen(
|
||||
[launcher],
|
||||
start_new_session=True,
|
||||
stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
env=env,
|
||||
creationflags=creationflags,
|
||||
)
|
||||
|
||||
mw.app.quit()
|
||||
|
||||
|
||||
def _trigger_launcher_run() -> None:
|
||||
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
|
||||
try:
|
||||
# Get the local data directory equivalent to Rust's dirs::data_local_dir()
|
||||
if is_win:
|
||||
from .winpaths import get_local_appdata
|
||||
|
||||
data_dir = Path(get_local_appdata())
|
||||
elif is_mac:
|
||||
data_dir = Path.home() / "Library" / "Application Support"
|
||||
else: # Linux
|
||||
data_dir = Path(
|
||||
os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
|
||||
)
|
||||
|
||||
pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
|
||||
|
||||
if pyproject_path.exists():
|
||||
# Touch the file to update its mtime
|
||||
pyproject_path.touch()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
|
|
@ -226,29 +226,45 @@ def ask_user_dialog(
|
|||
)
|
||||
|
||||
|
||||
def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
|
||||
def show_info(
|
||||
text: str,
|
||||
callback: Callable | None = None,
|
||||
parent: QWidget | None = None,
|
||||
**kwargs: Any,
|
||||
) -> MessageBox:
|
||||
"Show a small info window with an OK button."
|
||||
if "icon" not in kwargs:
|
||||
kwargs["icon"] = QMessageBox.Icon.Information
|
||||
return MessageBox(
|
||||
text,
|
||||
callback=(lambda _: callback()) if callback is not None else None,
|
||||
parent=parent,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def show_warning(
|
||||
text: str, callback: Callable | None = None, **kwargs: Any
|
||||
text: str,
|
||||
callback: Callable | None = None,
|
||||
parent: QWidget | None = None,
|
||||
**kwargs: Any,
|
||||
) -> MessageBox:
|
||||
"Show a small warning window with an OK button."
|
||||
return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
|
||||
return show_info(
|
||||
text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
|
||||
)
|
||||
|
||||
|
||||
def show_critical(
|
||||
text: str, callback: Callable | None = None, **kwargs: Any
|
||||
text: str,
|
||||
callback: Callable | None = None,
|
||||
parent: QWidget | None = None,
|
||||
**kwargs: Any,
|
||||
) -> MessageBox:
|
||||
"Show a small critical error window with an OK button."
|
||||
return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
|
||||
return show_info(
|
||||
text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
|
||||
)
|
||||
|
||||
|
||||
def showWarning(
|
||||
|
|
|
@@ -67,16 +67,12 @@ class CustomBuildHook(BuildHookInterface):

    def _should_exclude(self, path: Path) -> bool:
        """Check if a file should be excluded from the wheel."""
        path_str = str(path)

        # Exclude __pycache__
        if "/__pycache__/" in path_str:
        if "/__pycache__/" in str(path):
            return True

        if path.suffix in [".ui", ".scss", ".map", ".ts"]:
            return True
        if path.name.startswith("tsconfig"):
            return True
        if "/aqt/data" in path_str:
            return True
        return False
@@ -13,6 +13,10 @@ anki_process.workspace = true
anyhow.workspace = true
camino.workspace = true
dirs.workspace = true
serde_json.workspace = true

[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]
libc.workspace = true

[target.'cfg(windows)'.dependencies]
windows.workspace = true
|
|
|
@ -8,43 +8,103 @@ import os
|
|||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import aqt.sound
|
||||
from anki.utils import pointVersion
|
||||
from aqt import mw
|
||||
from aqt.qt import QAction
|
||||
from aqt.utils import askUser, is_mac, is_win, showInfo
|
||||
|
||||
|
||||
def _anki_launcher_path() -> str | None:
|
||||
def launcher_executable() -> str | None:
|
||||
"""Return the path to the Anki launcher executable."""
|
||||
return os.getenv("ANKI_LAUNCHER")
|
||||
|
||||
|
||||
def have_launcher() -> bool:
|
||||
return _anki_launcher_path() is not None
|
||||
def uv_binary() -> str | None:
|
||||
"""Return the path to the uv binary."""
|
||||
return os.environ.get("ANKI_LAUNCHER_UV")
|
||||
|
||||
|
||||
def launcher_root() -> str | None:
|
||||
"""Return the path to the launcher root directory (AnkiProgramFiles)."""
|
||||
return os.environ.get("UV_PROJECT")
|
||||
|
||||
|
||||
def venv_binary(cmd: str) -> str | None:
|
||||
"""Return the path to a binary in the launcher's venv."""
|
||||
root = launcher_root()
|
||||
if not root:
|
||||
return None
|
||||
|
||||
root_path = Path(root)
|
||||
if is_win:
|
||||
binary_path = root_path / ".venv" / "Scripts" / cmd
|
||||
else:
|
||||
binary_path = root_path / ".venv" / "bin" / cmd
|
||||
|
||||
return str(binary_path)
|
||||
|
||||
|
||||
def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
|
||||
"""Add Python requirements to the launcher venv using uv add.
|
||||
|
||||
Returns (success, output)"""
|
||||
|
||||
binary = uv_binary()
|
||||
if not binary:
|
||||
return (False, "Not in packaged build.")
|
||||
|
||||
uv_cmd = [binary, "add"] + reqs
|
||||
result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False)
|
||||
|
||||
if result.returncode == 0:
|
||||
root = launcher_root()
|
||||
if root:
|
||||
sync_marker = Path(root) / ".sync_complete"
|
||||
sync_marker.touch()
|
||||
return (True, result.stdout)
|
||||
else:
|
||||
return (False, result.stderr)
|
||||
|
||||
|
||||
def trigger_launcher_run() -> None:
|
||||
"""Create a trigger file to request launcher UI on next run."""
|
||||
try:
|
||||
root = launcher_root()
|
||||
if not root:
|
||||
return
|
||||
|
||||
trigger_path = Path(root) / ".want-launcher"
|
||||
trigger_path.touch()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
def update_and_restart() -> None:
|
||||
from aqt import mw
|
||||
|
||||
launcher = _anki_launcher_path()
|
||||
"""Update and restart Anki using the launcher."""
|
||||
launcher = launcher_executable()
|
||||
assert launcher
|
||||
|
||||
_trigger_launcher_run()
|
||||
trigger_launcher_run()
|
||||
|
||||
with contextlib.suppress(ResourceWarning):
|
||||
env = os.environ.copy()
|
||||
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
|
||||
creationflags = 0
|
||||
if sys.platform == "win32":
|
||||
creationflags = (
|
||||
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
|
||||
)
|
||||
# On Windows, changing the handles breaks ANSI display
|
||||
io = None if sys.platform == "win32" else subprocess.DEVNULL
|
||||
|
||||
subprocess.Popen(
|
||||
[launcher],
|
||||
start_new_session=True,
|
||||
stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
stdin=io,
|
||||
stdout=io,
|
||||
stderr=io,
|
||||
env=env,
|
||||
creationflags=creationflags,
|
||||
)
|
||||
|
@ -52,30 +112,6 @@ def update_and_restart() -> None:
|
|||
mw.app.quit()
|
||||
|
||||
|
||||
def _trigger_launcher_run() -> None:
|
||||
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
|
||||
try:
|
||||
# Get the local data directory equivalent to Rust's dirs::data_local_dir()
|
||||
if is_win:
|
||||
from aqt.winpaths import get_local_appdata
|
||||
|
||||
data_dir = Path(get_local_appdata())
|
||||
elif is_mac:
|
||||
data_dir = Path.home() / "Library" / "Application Support"
|
||||
else: # Linux
|
||||
data_dir = Path(
|
||||
os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
|
||||
)
|
||||
|
||||
pyproject_path = data_dir / "AnkiProgramFiles" / "pyproject.toml"
|
||||
|
||||
if pyproject_path.exists():
|
||||
# Touch the file to update its mtime
|
||||
pyproject_path.touch()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
def confirm_then_upgrade():
|
||||
if not askUser("Change to a different Anki version?"):
|
||||
return
|
||||
|
@ -116,10 +152,18 @@ def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]:
|
|||
return cmd, env
|
||||
|
||||
|
||||
def on_addon_config():
|
||||
showInfo(
|
||||
"This add-on is automatically added when installing older Anki versions, so that they work with the launcher. You can remove it if you wish."
|
||||
)
|
||||
|
||||
|
||||
def setup():
|
||||
mw.addonManager.setConfigAction(__name__, on_addon_config)
|
||||
|
||||
if pointVersion() >= 250600:
|
||||
return
|
||||
if not have_launcher():
|
||||
if not launcher_executable():
|
||||
return
|
||||
|
||||
# Add action to tools menu
|
||||
|
@ -129,7 +173,21 @@ def setup():
|
|||
|
||||
# Monkey-patch audio tools to use anki-audio
|
||||
if is_win or is_mac:
|
||||
import aqt
|
||||
import aqt.sound
|
||||
|
||||
aqt.sound._packagedCmd = _packagedCmd
|
||||
|
||||
# Inject launcher functions into launcher module
|
||||
import aqt.package
|
||||
|
||||
aqt.package.launcher_executable = launcher_executable
|
||||
aqt.package.update_and_restart = update_and_restart
|
||||
aqt.package.trigger_launcher_run = trigger_launcher_run
|
||||
aqt.package.uv_binary = uv_binary
|
||||
aqt.package.launcher_root = launcher_root
|
||||
aqt.package.venv_binary = venv_binary
|
||||
aqt.package.add_python_requirements = add_python_requirements
|
||||
|
||||
|
||||
setup()
|
||||
|
|
|
@@ -13,7 +13,8 @@ HOST_ARCH=$(uname -m)

# Define output paths
OUTPUT_DIR="../../../out/launcher"
LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux"

# Clean existing output directory
rm -rf "$LAUNCHER_DIR"
@@ -61,6 +62,7 @@ done
# Copy additional files from parent directory
cp ../pyproject.toml "$LAUNCHER_DIR/"
cp ../../../.python-version "$LAUNCHER_DIR/"
cp ../versions.py "$LAUNCHER_DIR/"

# Set executable permissions
chmod +x \
@@ -75,10 +77,9 @@ chmod +x \
# Set proper permissions and create tarball
chmod -R a+r "$LAUNCHER_DIR"

# Create tarball using the same options as the Rust template
ZSTD="zstd -c --long -T0 -18"
TRANSFORM="s%^.%anki-launcher%S"
TARBALL="$OUTPUT_DIR/anki-launcher.tar.zst"
TRANSFORM="s%^.%anki-launcher-$ANKI_VERSION-linux%S"
TARBALL="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux.tar.zst"

tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" .

@@ -5,9 +5,11 @@
    <key>CFBundleDisplayName</key>
    <string>Anki</string>
    <key>CFBundleShortVersionString</key>
    <string>1.0</string>
    <string>ANKI_VERSION</string>
    <key>LSMinimumSystemVersion</key>
    <string>11</string>
    <string>12</string>
    <key>LSApplicationCategoryType</key>
    <string>public.app-category.education</string>
    <key>CFBundleDocumentTypes</key>
    <array>
        <dict>
|
|
@ -30,25 +30,33 @@ lipo -create \
|
|||
-output "$APP_LAUNCHER/Contents/MacOS/launcher"
|
||||
cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"
|
||||
|
||||
# Build install_name_tool stub
|
||||
clang -arch arm64 -o "$OUTPUT_DIR/stub_arm64" stub.c
|
||||
clang -arch x86_64 -o "$OUTPUT_DIR/stub_x86_64" stub.c
|
||||
lipo -create "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64" -output "$APP_LAUNCHER/Contents/MacOS/install_name_tool"
|
||||
rm "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64"
|
||||
|
||||
# Copy support files
|
||||
cp Info.plist "$APP_LAUNCHER/Contents/"
|
||||
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
|
||||
sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
|
||||
cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/"
|
||||
cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/"
|
||||
cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/"
|
||||
cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
|
||||
|
||||
# Codesign
|
||||
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
|
||||
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
|
||||
--entitlements entitlements.python.xml \
|
||||
"$i"
|
||||
done
|
||||
|
||||
# Check
|
||||
codesign -vvv "$APP_LAUNCHER"
|
||||
spctl -a "$APP_LAUNCHER"
|
||||
|
||||
# Notarize and bundle (skip if NODMG is set)
|
||||
# Codesign/bundle
|
||||
if [ -z "$NODMG" ]; then
|
||||
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/install_name_tool" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
|
||||
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
|
||||
--entitlements entitlements.python.xml \
|
||||
"$i"
|
||||
done
|
||||
|
||||
# Check
|
||||
codesign -vvv "$APP_LAUNCHER"
|
||||
spctl -a "$APP_LAUNCHER"
|
||||
|
||||
# Notarize and build dmg
|
||||
./notarize.sh "$OUTPUT_DIR"
|
||||
./dmg/build.sh "$OUTPUT_DIR"
|
||||
fi
|
|
@@ -6,7 +6,8 @@ set -e
# base folder with Anki.app in it
output="$1"
dist="$1/tmp"
dmg_path="$output/Anki.dmg"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
dmg_path="$output/anki-launcher-$ANKI_VERSION-mac.dmg"

if [ -d "/Volumes/Anki" ]
then
|
|
qt/launcher/mac/stub.c: new file (6 lines):
@@ -0,0 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

int main(void) {
    return 0;
}
|
|
@@ -1,6 +1,6 @@
[project]
name = "anki-launcher"
version = "0.1.0"
version = "1.0.0"
description = "UV-based launcher for Anki."
requires-python = ">=3.9"
dependencies = [
|
|
@ -22,6 +22,11 @@ const NSIS_PATH: &str = "C:\\Program Files (x86)\\NSIS\\makensis.exe";
|
|||
fn main() -> Result<()> {
|
||||
println!("Building Windows launcher...");
|
||||
|
||||
// Read version early so it can be used throughout the build process
|
||||
let version = std::fs::read_to_string("../../../.version")?
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let output_dir = PathBuf::from(OUTPUT_DIR);
|
||||
let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR);
|
||||
let nsis_dir = PathBuf::from(NSIS_DIR);
|
||||
|
@ -31,16 +36,20 @@ fn main() -> Result<()> {
|
|||
extract_nsis_plugins()?;
|
||||
copy_files(&output_dir)?;
|
||||
sign_binaries(&output_dir)?;
|
||||
copy_nsis_files(&nsis_dir)?;
|
||||
copy_nsis_files(&nsis_dir, &version)?;
|
||||
build_uninstaller(&output_dir, &nsis_dir)?;
|
||||
sign_file(&output_dir.join("uninstall.exe"))?;
|
||||
generate_install_manifest(&output_dir)?;
|
||||
build_installer(&output_dir, &nsis_dir)?;
|
||||
sign_file(&PathBuf::from("../../../out/launcher_exe/anki-install.exe"))?;
|
||||
|
||||
let installer_filename = format!("anki-launcher-{version}-windows.exe");
|
||||
let installer_path = PathBuf::from("../../../out/launcher_exe").join(&installer_filename);
|
||||
|
||||
sign_file(&installer_path)?;
|
||||
|
||||
println!("Build completed successfully!");
|
||||
println!("Output directory: {}", output_dir.display());
|
||||
println!("Installer: ../../../out/launcher_exe/anki-install.exe");
|
||||
println!("Installer: ../../../out/launcher_exe/{installer_filename}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -139,6 +148,9 @@ fn copy_files(output_dir: &Path) -> Result<()> {
|
|||
output_dir.join(".python-version"),
|
||||
)?;
|
||||
|
||||
// Copy versions.py
|
||||
copy_file("../versions.py", output_dir.join("versions.py"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -232,11 +244,13 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn copy_nsis_files(nsis_dir: &Path) -> Result<()> {
|
||||
fn copy_nsis_files(nsis_dir: &Path, version: &str) -> Result<()> {
|
||||
println!("Copying NSIS support files...");
|
||||
|
||||
// Copy anki.template.nsi as anki.nsi
|
||||
copy_file("anki.template.nsi", nsis_dir.join("anki.nsi"))?;
|
||||
// Copy anki.template.nsi as anki.nsi and substitute version placeholders
|
||||
let template_content = std::fs::read_to_string("anki.template.nsi")?;
|
||||
let substituted_content = template_content.replace("ANKI_VERSION", version);
|
||||
write_file(nsis_dir.join("anki.nsi"), substituted_content)?;
|
||||
|
||||
// Copy fileassoc.nsh
|
||||
copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?;
|
||||
|
|
(One file's diff was suppressed because it is too large.)
|
@@ -62,8 +62,9 @@ pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<
pub fn relaunch_in_terminal() -> Result<()> {
    let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
    Command::new("open")
        .args(["-a", "Terminal"])
        .args(["-na", "Terminal"])
        .arg(current_exe)
        .env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
        .ensure_spawn()?;
    std::process::exit(0);
}
|
|
|
@@ -116,8 +116,9 @@ pub use windows::ensure_terminal_shown;
pub fn ensure_terminal_shown() -> Result<()> {
    use std::io::IsTerminal;

    let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
    let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
    if !stdout_is_terminal {
    if want_terminal || !stdout_is_terminal {
        #[cfg(target_os = "macos")]
        mac::relaunch_in_terminal()?;
        #[cfg(not(target_os = "macos"))]
@@ -128,3 +129,13 @@ pub fn ensure_terminal_shown() -> Result<()> {
    print!("\x1b]2;Anki Launcher\x07");
    Ok(())
}

pub fn ensure_os_supported() -> Result<()> {
    #[cfg(all(unix, not(target_os = "macos")))]
    unix::ensure_glibc_supported()?;

    #[cfg(target_os = "windows")]
    windows::ensure_windows_version_supported()?;

    Ok(())
}
|
|
|
@ -9,15 +9,22 @@ use anyhow::Result;
|
|||
pub fn relaunch_in_terminal() -> Result<()> {
|
||||
let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
|
||||
|
||||
// Try terminals in order of preference
|
||||
// Try terminals in roughly most specific to least specific.
|
||||
// First, try commonly used terminals for riced systems.
|
||||
// Second, try common defaults.
|
||||
// Finally, try x11 compatibility terminals.
|
||||
let terminals = [
|
||||
("x-terminal-emulator", vec!["-e"]),
|
||||
("gnome-terminal", vec!["--"]),
|
||||
("konsole", vec!["-e"]),
|
||||
("xfce4-terminal", vec!["-e"]),
|
||||
// commonly used for riced systems
|
||||
("alacritty", vec!["-e"]),
|
||||
("kitty", vec![]),
|
||||
("foot", vec![]),
|
||||
// the user's default terminal in Debian/Ubuntu
|
||||
("x-terminal-emulator", vec!["-e"]),
|
||||
// default installs for the most common distros
|
||||
("xfce4-terminal", vec!["-e"]),
|
||||
("gnome-terminal", vec!["-e"]),
|
||||
("konsole", vec!["-e"]),
|
||||
// x11-compatibility terminals
|
||||
("urxvt", vec!["-e"]),
|
||||
("xterm", vec!["-e"]),
|
||||
];
|
||||
|
@ -65,3 +72,34 @@ pub fn finalize_uninstall() {
|
|||
let mut input = String::new();
|
||||
let _ = stdin().read_line(&mut input);
|
||||
}
|
||||
|
||||
pub fn ensure_glibc_supported() -> Result<()> {
|
||||
use std::ffi::CStr;
|
||||
let get_glibc_version = || -> Option<(u32, u32)> {
|
||||
let version_ptr = unsafe { libc::gnu_get_libc_version() };
|
||||
if version_ptr.is_null() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let version_cstr = unsafe { CStr::from_ptr(version_ptr) };
|
||||
let version_str = version_cstr.to_str().ok()?;
|
||||
|
||||
// Parse version string (format: "2.36" or "2.36.1")
|
||||
let version_parts: Vec<&str> = version_str.split('.').collect();
|
||||
if version_parts.len() < 2 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let major: u32 = version_parts[0].parse().ok()?;
|
||||
let minor: u32 = version_parts[1].parse().ok()?;
|
||||
|
||||
Some((major, minor))
|
||||
};
|
||||
|
||||
let (major, minor) = get_glibc_version().unwrap_or_default();
|
||||
if major < 2 || (major == 2 && minor < 36) {
|
||||
anyhow::bail!("Anki requires a modern Linux distro with glibc 2.36 or later.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ use anyhow::Context;
|
|||
use anyhow::Result;
|
||||
use widestring::u16cstr;
|
||||
use windows::core::PCWSTR;
|
||||
use windows::Wdk::System::SystemServices::RtlGetVersion;
|
||||
use windows::Win32::System::Console::AttachConsole;
|
||||
use windows::Win32::System::Console::GetConsoleWindow;
|
||||
use windows::Win32::System::Console::ATTACH_PARENT_PROCESS;
|
||||
|
@ -18,8 +19,45 @@ use windows::Win32::System::Registry::HKEY;
|
|||
use windows::Win32::System::Registry::HKEY_CURRENT_USER;
|
||||
use windows::Win32::System::Registry::KEY_READ;
|
||||
use windows::Win32::System::Registry::REG_SZ;
|
||||
use windows::Win32::System::SystemInformation::OSVERSIONINFOW;
|
||||
use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID;
|
||||
|
||||
/// Returns true if running on Windows 10 (not Windows 11)
|
||||
fn is_windows_10() -> bool {
|
||||
unsafe {
|
||||
let mut info = OSVERSIONINFOW {
|
||||
dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
|
||||
..Default::default()
|
||||
};
|
||||
if RtlGetVersion(&mut info).is_ok() {
|
||||
// Windows 10 has build numbers < 22000, Windows 11 >= 22000
|
||||
info.dwBuildNumber < 22000 && info.dwMajorVersion == 10
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures Windows 10 version 1809 or later
|
||||
pub fn ensure_windows_version_supported() -> Result<()> {
|
||||
unsafe {
|
||||
let mut info = OSVERSIONINFOW {
|
||||
dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if RtlGetVersion(&mut info).is_err() {
|
||||
anyhow::bail!("Failed to get Windows version information");
|
||||
}
|
||||
|
||||
if info.dwBuildNumber >= 17763 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
anyhow::bail!("Windows 10 version 1809 or later is required.")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ensure_terminal_shown() -> Result<()> {
|
||||
unsafe {
|
||||
if !GetConsoleWindow().is_invalid() {
|
||||
|
@ -29,6 +67,14 @@ pub fn ensure_terminal_shown() -> Result<()> {
|
|||
}
|
||||
|
||||
if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() {
|
||||
// This black magic triggers Windows to switch to the new
|
||||
// ANSI-supporting console host, which is usually only available
|
||||
// when the app is built with the console subsystem.
|
||||
// Only needed on Windows 10, not Windows 11.
|
||||
if is_windows_10() {
|
||||
let _ = Command::new("cmd").args(["/C", ""]).status();
|
||||
}
|
||||
|
||||
// Successfully attached to parent console
|
||||
reconnect_stdio_to_console();
|
||||
return Ok(());
|
||||
|
|
qt/launcher/versions.py: new file (44 lines):
@@ -0,0 +1,44 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import json
import sys

import pip_system_certs.wrapt_requests
import requests

pip_system_certs.wrapt_requests.inject_truststore()


def main():
    """Fetch and return all versions from PyPI, sorted by upload time."""
    url = "https://pypi.org/pypi/aqt/json"

    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        data = response.json()
        releases = data.get("releases", {})

        # Create list of (version, upload_time) tuples
        version_times = []
        for version, files in releases.items():
            if files:  # Only include versions that have files
                # Use the upload time of the first file for each version
                upload_time = files[0].get("upload_time_iso_8601")
                if upload_time:
                    version_times.append((version, upload_time))

        # Sort by upload time
        version_times.sort(key=lambda x: x[1])

        # Extract just the version names
        versions = [version for version, _ in version_times]
        print(json.dumps(versions))
    except Exception as e:
        print(f"Error fetching versions: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
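Illustrative sketch (not part of the diff above): a consumer of this script reads the JSON list it prints, which is ordered oldest to newest by upload time. A hedged example of invoking it (paths are illustrative):

    import json
    import subprocess

    result = subprocess.run(
        ["python", "versions.py"], capture_output=True, text=True, check=True
    )
    versions = json.loads(result.stdout)  # oldest -> newest by upload time
    latest = versions[-1] if versions else None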
@@ -24,7 +24,7 @@ Name "Anki"
Unicode true

; The file to write (relative to nsis directory)
OutFile "..\launcher_exe\anki-install.exe"
OutFile "..\launcher_exe\anki-launcher-ANKI_VERSION-windows.exe"

; Non elevated
RequestExecutionLevel user
@@ -214,7 +214,7 @@ Section ""

; Write the uninstall keys for Windows
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki Launcher"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "1.0.0"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "ANKI_VERSION"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"'
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S'
WriteRegDWORD HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1
@@ -33,6 +33,12 @@ class _MacOSHelper:
        "On completion, file should be saved if no error has arrived."
        self._dll.end_wav_record()

    def disable_appnap(self) -> None:
        self._dll.disable_appnap()

    def enable_appnap(self) -> None:
        self._dll.enable_appnap()


# this must not be overwritten or deallocated
@CFUNCTYPE(None, c_char_p)  # type: ignore
qt/mac/appnap.swift (new file, 25 lines)
@@ -0,0 +1,25 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import Foundation

private var currentActivity: NSObjectProtocol?

@_cdecl("disable_appnap")
public func disableAppNap() {
    // No-op if already assigned
    guard currentActivity == nil else { return }

    currentActivity = ProcessInfo.processInfo.beginActivity(
        options: .userInitiatedAllowingIdleSystemSleep,
        reason: "AppNap is disabled"
    )
}

@_cdecl("enable_appnap")
public func enableAppNap() {
    guard let activity = currentActivity else { return }

    ProcessInfo.processInfo.endActivity(activity)
    currentActivity = nil
}
@@ -15,6 +15,7 @@ echo "Building macOS helper dylib..."
 # Create the wheel using uv
 echo "Creating wheel..."
 cd "$SCRIPT_DIR"
+rm -rf dist
 "$PROJ_ROOT/out/extracted/uv/uv" build --wheel
 
 echo "Build complete!"
@@ -1,8 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 import os
 import platform
 import subprocess
 import sys
 from pathlib import Path
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "anki-mac-helper"
-version = "0.1.0"
+version = "0.1.1"
 description = "Small support library for Anki on Macs"
 requires-python = ">=3.9"
 license = { text = "AGPL-3.0-or-later" }
qt/mac/update-launcher-env (new executable file, 14 lines)
@@ -0,0 +1,14 @@
#!/bin/bash
#
# Build and install into the launcher venv

set -e

./build.sh
if [[ "$OSTYPE" == "darwin"* ]]; then
    export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv
else
    export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv
fi
../../out/extracted/uv/uv pip install dist/*.whl
@@ -12,7 +12,7 @@ dependencies = [
     "send2trash",
     "waitress>=2.0.0",
     "pywin32; sys.platform == 'win32'",
-    "anki-mac-helper; sys.platform == 'darwin'",
+    "anki-mac-helper>=0.1.1; sys.platform == 'darwin'",
     "pip-system-certs!=5.1",
     "pyqt6>=6.2",
     "pyqt6-webengine>=6.2",
@@ -37,14 +37,14 @@ qt67 = [
     "pyqt6-webengine-qt6==6.7.3",
     "pyqt6_sip==13.10.2",
 ]
-qt69 = [
+qt = [
     "pyqt6==6.9.1",
     "pyqt6-qt6==6.9.1",
-    "pyqt6-webengine==6.9.0",
-    "pyqt6-webengine-qt6==6.9.1",
+    "pyqt6-webengine==6.8.0",
+    "pyqt6-webengine-qt6==6.8.2",
     "pyqt6_sip==13.10.2",
 ]
-qt = [
+qt68 = [
     "pyqt6==6.8.0",
     "pyqt6-qt6==6.8.1",
     "pyqt6-webengine==6.8.0",
@@ -58,7 +58,7 @@ conflicts = [
         { extra = "qt" },
         { extra = "qt66" },
         { extra = "qt67" },
-        { extra = "qt69" },
+        { extra = "qt68" },
     ],
 ]
 
@@ -72,9 +72,12 @@ build-backend = "hatchling.build"
 [project.scripts]
 anki = "aqt:run"
 
+[project.gui-scripts]
+ankiw = "aqt:run"
+
 [tool.hatch.build.targets.wheel]
 packages = ["aqt"]
-exclude = ["**/*.scss", "**/*.ui"]
+exclude = ["aqt/data", "**/*.ui"]
 
 [tool.hatch.version]
 source = "code"
@@ -48,6 +48,7 @@ async-trait.workspace = true
 axum.workspace = true
+axum-client-ip.workspace = true
 axum-extra.workspace = true
 bitflags.workspace = true
 blake3.workspace = true
 bytes.workspace = true
 chrono.workspace = true
@@ -80,6 +81,7 @@ pin-project.workspace = true
 prost.workspace = true
+pulldown-cmark.workspace = true
 rand.workspace = true
 rayon.workspace = true
 regex.workspace = true
 reqwest.workspace = true
 rusqlite.workspace = true
@@ -22,6 +22,7 @@ inflections.workspace = true
 anki_io.workspace = true
 anyhow.workspace = true
+itertools.workspace = true
 regex.workspace = true
 
 [dependencies]
 fluent.workspace = true
@@ -4,6 +4,5 @@
 // Include auto-generated content
 
 #![allow(clippy::all)]
-#![allow(text_direction_codepoint_in_literal)]
 
 include!(concat!(env!("OUT_DIR"), "/strings.rs"));
@@ -195,12 +195,30 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
     .unwrap();
 
     for (module, contents) in modules {
-        writeln!(buf, r###"    "{module}" => r##"{contents}"##,"###).unwrap();
+        let escaped_contents = escape_unicode_control_chars(contents);
+        writeln!(
+            buf,
+            r###"    "{module}" => r##"{escaped_contents}"##,"###
+        )
+        .unwrap();
     }
 
     buf.push_str("};\n");
 }
 
+fn escape_unicode_control_chars(input: &str) -> String {
+    use regex::Regex;
+
+    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
+    let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
+
+    re.replace_all(input, |caps: &regex::Captures| {
+        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
+        format!("\\u{{{:04x}}}", c as u32)
+    })
+    .into_owned()
+}
+
 fn lang_constant_name(lang: &str) -> String {
     lang.to_ascii_uppercase().replace('-', "_")
 }
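The effect of the new escaping: bidirectional control characters (U+202A–U+202E, U+2066–U+2069) in translated strings become textual \u{...} escapes, so the generated strings.rs no longer embeds the raw codepoints, which is why the text_direction_codepoint_in_literal allow above could be dropped. A small check, written as a test one might place next to the function; it is not part of the diff:

    #[test]
    fn escapes_bidi_control_chars() {
        // U+202E (RIGHT-TO-LEFT OVERRIDE) becomes a \u escape, so the raw
        // control character never ends up inside the generated r##"..."## literal.
        let input = "abc\u{202e}def";
        assert_eq!(escape_unicode_control_chars(input), "abc\\u{202e}def");
    }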
@@ -42,14 +42,14 @@ enum CheckableUrl {
 }
 
 impl CheckableUrl {
-    fn url(&self) -> Cow<str> {
+    fn url(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_to_link(page).into(),
             Self::String(s) => s.into(),
         }
     }
 
-    fn anchor(&self) -> Cow<str> {
+    fn anchor(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_link_suffix(page).into(),
             Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
@@ -11,6 +11,24 @@ use snafu::ensure;
 use snafu::ResultExt;
 use snafu::Snafu;
 
+#[derive(Debug)]
+pub struct CodeDisplay(Option<i32>);
+
+impl std::fmt::Display for CodeDisplay {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self.0 {
+            Some(code) => write!(f, "{code}"),
+            None => write!(f, "?"),
+        }
+    }
+}
+
+impl From<Option<i32>> for CodeDisplay {
+    fn from(code: Option<i32>) -> Self {
+        CodeDisplay(code)
+    }
+}
+
 #[derive(Debug, Snafu)]
 pub enum Error {
     #[snafu(display("Failed to execute: {cmdline}"))]
@@ -18,8 +36,15 @@ pub enum Error {
         cmdline: String,
         source: std::io::Error,
     },
-    #[snafu(display("Failed with code {code:?}: {cmdline}"))]
-    ReturnedError { cmdline: String, code: Option<i32> },
+    #[snafu(display("Failed to run ({code}): {cmdline}"))]
+    ReturnedError { cmdline: String, code: CodeDisplay },
+    #[snafu(display("Failed to run ({code}): {cmdline}: {stdout}{stderr}"))]
+    ReturnedWithOutputError {
+        cmdline: String,
+        code: CodeDisplay,
+        stdout: String,
+        stderr: String,
+    },
     #[snafu(display("Couldn't decode stdout/stderr as utf8"))]
     InvalidUtf8 {
         cmdline: String,
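CodeDisplay exists so the error strings can show a missing exit code (e.g. a child killed by a signal) as a plain "?" rather than the Option debug formatting used before. For illustration (not part of the diff):

    #[test]
    fn code_display_formatting() {
        // How CodeDisplay renders inside the snafu display strings above.
        assert_eq!(format!("{}", CodeDisplay::from(Some(1))), "1");
        // Yields e.g. "Failed to run (?): ..." when no exit code is available.
        assert_eq!(format!("{}", CodeDisplay::from(None)), "?");
    }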
@@ -71,31 +96,36 @@ impl CommandExt for Command {
             status.success(),
             ReturnedSnafu {
                 cmdline: get_cmdline(self),
-                code: status.code(),
+                code: CodeDisplay::from(status.code()),
             }
         );
         Ok(self)
     }
 
     fn utf8_output(&mut self) -> Result<Utf8Output> {
+        let cmdline = get_cmdline(self);
         let output = self.output().with_context(|_| DidNotExecuteSnafu {
-            cmdline: get_cmdline(self),
+            cmdline: cmdline.clone(),
         })?;
+
+        let stdout = String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
+            cmdline: cmdline.clone(),
+        })?;
+        let stderr = String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
+            cmdline: cmdline.clone(),
+        })?;
 
         ensure!(
             output.status.success(),
-            ReturnedSnafu {
-                cmdline: get_cmdline(self),
-                code: output.status.code(),
+            ReturnedWithOutputSnafu {
+                cmdline,
+                code: CodeDisplay::from(output.status.code()),
+                stdout: stdout.clone(),
+                stderr: stderr.clone(),
             }
         );
-        Ok(Utf8Output {
-            stdout: String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
-                cmdline: get_cmdline(self),
-            })?,
-            stderr: String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
-                cmdline: get_cmdline(self),
-            })?,
-        })
+
+        Ok(Utf8Output { stdout, stderr })
     }
 
     fn ensure_spawn(&mut self) -> Result<std::process::Child> {
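With utf8_output() now capturing stdout/stderr up front, a failing command reports its output through ReturnedWithOutputError instead of discarding it. A sketch of typical usage, assuming the CommandExt trait and this module's Result alias are in scope (illustrative, not taken from the diff):

    use std::process::Command;

    // Illustrative usage; on failure the error text comes from the snafu
    // display strings above, including the captured stdout/stderr.
    fn print_git_status() -> Result<()> {
        let out = Command::new("git").args(["status", "--short"]).utf8_output()?;
        println!("stdout: {}", out.stdout);
        println!("stderr: {}", out.stderr);
        Ok(())
    }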
@@ -135,7 +165,10 @@ mod test {
         #[cfg(not(windows))]
         assert!(matches!(
             Command::new("false").ensure_success(),
-            Err(Error::ReturnedError { code: Some(1), .. })
+            Err(Error::ReturnedError {
+                code: CodeDisplay(_),
+                ..
+            })
         ));
     }
 }
@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
 }
 
 impl Backend {
-    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_some()
@@ -102,7 +102,7 @@ impl Backend {
             .ok_or(AnkiError::CollectionNotOpen)
     }
 
-    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_none()
@@ -39,6 +39,7 @@ impl From<BoolKeyProto> for BoolKey {
             BoolKeyProto::RenderLatex => BoolKey::RenderLatex,
             BoolKeyProto::LoadBalancerEnabled => BoolKey::LoadBalancerEnabled,
             BoolKeyProto::FsrsShortTermWithStepsEnabled => BoolKey::FsrsShortTermWithStepsEnabled,
+            BoolKeyProto::FsrsLegacyEvaluate => BoolKey::FsrsLegacyEvaluate,
         }
     }
 }
@@ -105,7 +105,8 @@ impl Card {
 
     /// Returns true if the card has a due date in terms of days.
     fn is_due_in_days(&self) -> bool {
-        matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
+        self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
+            || matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
             || (self.ctype == CardType::Review && self.is_undue_queue())
     }
 
@@ -125,20 +126,20 @@ impl Card {
         }
     }
 
-    /// This uses card.due and card.ivl to infer the elapsed time. If 'set due
-    /// date' or an add-on has changed the due date, this won't be accurate.
-    pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
-        if !self.is_due_in_days() {
-            Some(
-                (timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
-                    / 86_400,
-            )
-        } else {
+    /// If last_review_date isn't stored in the card, this uses card.due and
+    /// card.ivl to infer the elapsed time, which won't be accurate if
+    /// 'set due date' or an add-on has changed the due date.
+    pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
+        if let Some(last_review_time) = self.last_review_time {
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
+        } else if self.is_due_in_days() {
             self.due_time(timing).map(|due| {
                 (due.adding_secs(-86_400 * self.interval as i64)
-                    .elapsed_secs()
-                    / 86_400) as u32
+                    .elapsed_secs()) as u32
             })
+        } else {
+            let last_review_time = TimestampSecs(self.original_or_current_due() as i64);
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
         }
     }
 }
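The three branches, in order: a stored last_review_time wins; otherwise day-based cards infer the last review as `interval` days before the due time; otherwise the due field itself is treated as a review timestamp. A small illustration of the middle branch's arithmetic (numbers are made up):

    // Illustrative only: how the day-based fallback infers the last review time.
    fn assumed_last_review_secs(due_secs: i64, interval_days: i64) -> i64 {
        due_secs - interval_days * 86_400
    }

    // A 10-day-interval card due at Unix time 1_700_000_000 is treated as last
    // reviewed at 1_699_136_000; elapsed seconds are measured from that point.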
@@ -541,12 +542,12 @@ impl RowContext {
         self.cards[0]
             .memory_state
             .as_ref()
-            .zip(self.cards[0].days_since_last_review(&self.timing))
+            .zip(self.cards[0].seconds_since_last_review(&self.timing))
             .zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
-            .map(|((state, days_elapsed), decay)| {
-                let r = FSRS::new(None).unwrap().current_retrievability(
+            .map(|((state, seconds), decay)| {
+                let r = FSRS::new(None).unwrap().current_retrievability_seconds(
                     (*state).into(),
-                    days_elapsed,
+                    seconds,
                     decay,
                 );
                 format!("{:.0}%", r * 100.)
@@ -96,6 +96,7 @@ pub struct Card {
     pub(crate) memory_state: Option<FsrsMemoryState>,
     pub(crate) desired_retention: Option<f32>,
     pub(crate) decay: Option<f32>,
+    pub(crate) last_review_time: Option<TimestampSecs>,
     /// JSON object or empty; exposed through the reviewer for persisting custom
     /// state
     pub(crate) custom_data: String,
@@ -147,6 +148,7 @@ impl Default for Card {
             memory_state: None,
             desired_retention: None,
             decay: None,
+            last_review_time: None,
             custom_data: String::new(),
         }
     }
@@ -107,6 +107,7 @@ impl TryFrom<anki_proto::cards::Card> for Card {
             memory_state: c.memory_state.map(Into::into),
             desired_retention: c.desired_retention,
             decay: c.decay,
+            last_review_time: c.last_review_time_secs.map(TimestampSecs),
             custom_data: c.custom_data,
         })
     }
@@ -136,6 +137,7 @@ impl From<Card> for anki_proto::cards::Card {
             memory_state: c.memory_state.map(Into::into),
             desired_retention: c.desired_retention,
             decay: c.decay,
+            last_review_time_secs: c.last_review_time.map(|t| t.0),
             custom_data: c.custom_data,
         }
     }
@@ -34,7 +34,7 @@ pub fn prettify_av_tags<S: Into<String> + AsRef<str>>(txt: S) -> String {
 
 /// Parse `txt` into [CardNodes] and return the result,
 /// or [None] if it only contains text nodes.
-fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
+fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
     let nodes = CardNodes::parse(txt);
     (!nodes.text_only).then_some(nodes)
 }
Some files were not shown because too many files have changed in this diff.