Mirror of https://github.com/ankitects/anki.git (synced 2025-09-25 01:06:35 -04:00)

Compare commits
No commits in common. "main" and "25.06b6" have entirely different histories.
343 changed files with 8136 additions and 10532 deletions.

Where a hunk below shows two adjacent variants of the same line, the first is the "main" version and the second the "25.06b6" version (compare the .version hunk).

@@ -10,6 +10,3 @@ PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows
[term]
color = "always"

[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

.gitignore (vendored, 2 changed lines)
@@ -19,4 +19,4 @@ yarn-error.log
ts/.svelte-kit
.yarn
.claude/settings.local.json
.claude/user.md
CLAUDE.local.md

.isort.cfg (new file, 4 lines)
@@ -0,0 +1,4 @@
[settings]
py_version=39
known_first_party=anki,aqt,tests
profile=black

.pylintrc (new file, 48 lines)
@@ -0,0 +1,48 @@
[MASTER]
ignore-patterns=.*_pb2.*
persistent = no
extension-pkg-whitelist=orjson,PyQt6
init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])"

[REPORTS]
output-format=colorized

[MESSAGES CONTROL]
disable=
  R,
  line-too-long,
  too-many-lines,
  missing-function-docstring,
  missing-module-docstring,
  missing-class-docstring,
  import-outside-toplevel,
  wrong-import-position,
  wrong-import-order,
  fixme,
  unused-wildcard-import,
  attribute-defined-outside-init,
  redefined-builtin,
  wildcard-import,
  broad-except,
  bare-except,
  unused-argument,
  unused-variable,
  redefined-outer-name,
  global-statement,
  protected-access,
  arguments-differ,
  arguments-renamed,
  consider-using-f-string,
  invalid-name,
  broad-exception-raised

[BASIC]
good-names =
  id,
  tr,
  db,
  ok,
  ip,

[IMPORTS]
ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2

.ruff.toml (93 changed lines)
@@ -1,91 +1,2 @@
lint.select = [
    "E", # pycodestyle errors
    "F", # Pyflakes errors
    "PL", # Pylint rules
    "I", # Isort rules
    "ARG",
    # "UP", # pyupgrade
    # "B", # flake8-bugbear
    # "SIM", # flake8-simplify
]

extend-exclude = ["*_pb2.py", "*_pb2.pyi"]

lint.ignore = [
    # Docstring rules (missing-*-docstring in pylint)
    "D100", # Missing docstring in public module
    "D101", # Missing docstring in public class
    "D103", # Missing docstring in public function

    # Import rules (wrong-import-* in pylint)
    "E402", # Module level import not at top of file
    "E501", # Line too long

    # pycodestyle rules
    "E741", # ambiguous-variable-name

    # Comment rules (fixme in pylint)
    "FIX002", # Line contains TODO

    # Pyflakes rules
    "F402", # import-shadowed-by-loop-var
    "F403", # undefined-local-with-import-star
    "F405", # undefined-local-with-import-star-usage

    # Naming rules (invalid-name in pylint)
    "N801", # Class name should use CapWords convention
    "N802", # Function name should be lowercase
    "N803", # Argument name should be lowercase
    "N806", # Variable in function should be lowercase
    "N811", # Constant imported as non-constant
    "N812", # Lowercase imported as non-lowercase
    "N813", # Camelcase imported as lowercase
    "N814", # Camelcase imported as constant
    "N815", # Variable in class scope should not be mixedCase
    "N816", # Variable in global scope should not be mixedCase
    "N817", # CamelCase imported as acronym
    "N818", # Error suffix in exception names

    # Pylint rules
    "PLW0603", # global-statement
    "PLW2901", # redefined-loop-name
    "PLC0415", # import-outside-top-level
    "PLR2004", # magic-value-comparison

    # Exception handling (broad-except, bare-except in pylint)
    "BLE001", # Do not catch blind exception

    # Argument rules (unused-argument in pylint)
    "ARG001", # Unused function argument
    "ARG002", # Unused method argument
    "ARG005", # Unused lambda argument

    # Access rules (protected-access in pylint)
    "SLF001", # Private member accessed

    # String formatting (consider-using-f-string in pylint)
    "UP032", # Use f-string instead of format call

    # Exception rules (broad-exception-raised in pylint)
    "TRY301", # Abstract raise to an inner function

    # Builtin shadowing (redefined-builtin in pylint)
    "A001", # Variable shadows a Python builtin
    "A002", # Argument shadows a Python builtin
    "A003", # Class attribute shadows a Python builtin
]

[lint.per-file-ignores]
"**/anki/*_pb2.py" = ["ALL"]

[lint.pep8-naming]
ignore-names = ["id", "tr", "db", "ok", "ip"]

[lint.pylint]
max-args = 12
max-returns = 10
max-branches = 35
max-statements = 125

[lint.isort]
known-first-party = ["anki", "aqt", "tests"]

target-version = "py39"
extend-exclude = []

.version (2 changed lines)
@@ -1 +1 @@
25.09.2
25.06b6

@@ -2,7 +2,7 @@
"recommendations": [
    "dprint.dprint",
    "ms-python.python",
    "charliermarsh.ruff",
    "ms-python.black-formatter",
    "rust-lang.rust-analyzer",
    "svelte.svelte-vscode",
    "zxh404.vscode-proto3",

@@ -18,7 +18,7 @@
    "out/qt",
    "qt"
],
"python.formatting.provider": "charliermarsh.ruff",
"python.formatting.provider": "black",
"python.linting.mypyEnabled": false,
"python.analysis.diagnosticSeverityOverrides": {
    "reportMissingModuleSource": "none"

@@ -1,2 +1 @@
nodeLinker: node-modules
enableScripts: false

@@ -80,7 +80,3 @@ when possible.
in rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping in build scripts/tests is fine.

## Individual preferences

See @.claude/user.md
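As a rough illustration of the error-handling convention this removed section describes (a minimal sketch; `read_config` and its file name are invented for the example, not taken from the Anki codebase):

```rust
use anyhow::{Context, Result};

// Hypothetical helper showing the "anyhow + additional context" style the
// guideline recommends for Rust modules outside rslib.
fn read_config(path: &str) -> Result<String> {
    std::fs::read_to_string(path).with_context(|| format!("reading {path}"))
}

fn main() -> Result<()> {
    let text = read_config("config.toml")?;
    println!("{} bytes", text.len());
    Ok(())
}
```

Inside rslib itself, the equivalent role is played by error/mod.rs's AnkiError/Result with snafu, as the guideline states.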

CONTRIBUTORS (11 changed lines)
@@ -49,7 +49,6 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org>
kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt>
@@ -234,16 +233,6 @@ Spiritual Father <https://github.com/spiritualfather>
Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>

********************

The text of the 3 clause BSD license follows:

Cargo.lock (generated, 1123 changed lines)
File diff suppressed because it is too large.

Cargo.toml (14 changed lines)
@@ -33,8 +33,9 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

[workspace.dependencies.fsrs]
version = "5.1.0"
version = "4.1.1"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"

[workspace.dependencies]
@@ -51,7 +52,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes

# normal
ammonia = "4.1.2"
ammonia = "4.1.0"
anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6"
@@ -59,7 +60,6 @@ async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] }
bitflags = "2.9.1"
blake3 = "1.8.2"
bytes = "1.10.1"
camino = "1.1.10"
@@ -109,7 +109,6 @@ prost-types = "0.13"
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@@ -133,15 +132,14 @@ tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
walkdir = "2.5.0"
which = "8.0.0"
widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
winapi = { version = "0.3", features = ["wincon"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }

@@ -1,4 +1,4 @@
# Anki®
# Anki

[](https://buildkite.com/ankitects/anki-ci)

@@ -1,18 +1,23 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::env;

use anyhow::Result;
use ninja_gen::action::BuildAction;
use ninja_gen::archives::Platform;
use ninja_gen::build::FilesHandle;
use ninja_gen::command::RunCommand;
use ninja_gen::copy::CopyFiles;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::input::BuildInput;
use ninja_gen::inputs;
use ninja_gen::python::python_format;
use ninja_gen::python::PythonEnvironment;
use ninja_gen::python::PythonLint;
use ninja_gen::python::PythonTypecheck;
use ninja_gen::python::RuffCheck;
use ninja_gen::rsync::RsyncFiles;
use ninja_gen::Build;

/// Normalize version string by removing leading zeros from numeric parts

@@ -46,7 +51,7 @@ fn normalize_version(version: &str) -> String {
            part.to_string()
        } else {
            let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
            format!("{normalized_prefix}{rest}")
            format!("{}{}", normalized_prefix, rest)
        }
    }
})

@@ -55,7 +60,14 @@
}

pub fn setup_venv(build: &mut Build) -> Result<()> {
    let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"];
    let extra_binary_exports = &[
        "mypy",
        "black",
        "isort",
        "pylint",
        "pytest",
        "protoc-gen-mypy",
    ];
    build.add_action(
        "pyenv",
        PythonEnvironment {

@@ -123,14 +135,7 @@ impl BuildAction for BuildWheel {
    }

    fn files(&mut self, build: &mut impl FilesHandle) {
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            let uv_path =
                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
            build.add_inputs("uv", inputs![uv_path]);
        } else {
            build.add_inputs("uv", inputs![":uv_binary"]);
        }

        build.add_inputs("uv", inputs![":uv_binary"]);
        build.add_inputs("", &self.deps);

        // Set the project directory based on which package we're building

@@ -195,26 +200,60 @@ pub fn check_python(build: &mut Build) -> Result<()> {
        },
    )?;

    let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"];
    let ruff_deps = inputs![
        glob!["{pylib,ftl,qt,python,tools}/**/*.py"],
        ":pylib:anki",
        ":qt:aqt"
    ];
    add_pylint(build)?;

    Ok(())
}

fn add_pylint(build: &mut Build) -> Result<()> {
    // pylint does not support PEP420 implicit namespaces split across import paths,
    // so we need to merge our pylib sources and generated files before invoking it,
    // and add a top-level __init__.py
    build.add_action(
        "check:ruff",
        RuffCheck {
            folders: ruff_folders,
            deps: ruff_deps.clone(),
            check_only: true,
        "check:pylint:copy_pylib",
        RsyncFiles {
            inputs: inputs![":pylib:anki"],
            target_folder: "pylint/anki",
            strip_prefix: "$builddir/pylib/anki",
            // avoid copying our large rsbridge binary
            extra_args: "--links",
        },
    )?;
    build.add_action(
        "fix:ruff",
        RuffCheck {
            folders: ruff_folders,
            deps: ruff_deps,
            check_only: false,
        "check:pylint:copy_pylib",
        RsyncFiles {
            inputs: inputs![glob!["pylib/anki/**"]],
            target_folder: "pylint/anki",
            strip_prefix: "pylib/anki",
            extra_args: "",
        },
    )?;
    build.add_action(
        "check:pylint:copy_pylib",
        RunCommand {
            command: ":pyenv:bin",
            args: "$script $out",
            inputs: hashmap! { "script" => inputs!["python/mkempty.py"] },
            outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] },
        },
    )?;
    build.add_action(
        "check:pylint",
        PythonLint {
            folders: &[
                "$builddir/pylint/anki",
                "qt/aqt",
                "ftl",
                "pylib/tools",
                "tools",
                "python",
            ],
            pylint_ini: inputs![".pylintrc"],
            deps: inputs![
                ":check:pylint:copy_pylib",
                ":qt:aqt",
                glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py")
            ],
        },
    )?;

@@ -227,19 +266,15 @@ struct Sphinx {

impl BuildAction for Sphinx {
    fn command(&self) -> &str {
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            "$python python/sphinx/build.py"
        } else {
        if env::var("OFFLINE_BUILD").is_err() {
            "$uv sync --extra sphinx && $python python/sphinx/build.py"
        } else {
            "$python python/sphinx/build.py"
        }
    }

    fn files(&mut self, build: &mut impl FilesHandle) {
        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
            let uv_path =
                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
            build.add_inputs("uv", inputs![uv_path]);
        } else {
        if env::var("OFFLINE_BUILD").is_err() {
            build.add_inputs("uv", inputs![":uv_binary"]);
            // Set environment variable to use the existing pyenv
            build.add_variable("pyenv_path", "$builddir/pyenv");
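Note that the two sides gate offline builds differently: 25.06b6 treats OFFLINE_BUILD set to any value as offline, while main requires the exact value "1". A standalone sketch of the two checks (variable names are illustrative):

```rust
use std::env;

fn main() {
    // 25.06b6 side: offline whenever the variable is set at all.
    let offline_beta = env::var("OFFLINE_BUILD").is_ok();
    // main side: offline only when the variable is set to exactly "1".
    let offline_main = env::var("OFFLINE_BUILD").ok().as_deref() == Some("1");
    println!("beta considers offline: {offline_beta}, main considers offline: {offline_main}");
}
```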

@@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {

pub fn check_rust(build: &mut Build) -> Result<()> {
    let inputs = inputs![
        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"),
        "Cargo.lock",
        "Cargo.toml",
        "rust-toolchain.toml",

@@ -35,7 +35,3 @@ path = "src/bin/update_uv.rs"
[[bin]]
name = "update_protoc"
path = "src/bin/update_protoc.rs"

[[bin]]
name = "update_node"
path = "src/bin/update_node.rs"

@@ -49,46 +49,6 @@ pub trait BuildAction {
    }

    fn name(&self) -> &'static str {
        std::any::type_name::<Self>()
            .split("::")
            .last()
            .unwrap()
            .split('<')
            .next()
            .unwrap()
        std::any::type_name::<Self>().split("::").last().unwrap()
    }
}

#[cfg(test)]
trait TestBuildAction {}

#[cfg(test)]
impl<T: TestBuildAction + ?Sized> BuildAction for T {
    fn command(&self) -> &str {
        "test"
    }
    fn files(&mut self, _build: &mut impl FilesHandle) {}
}

#[allow(dead_code, unused_variables)]
#[test]
fn should_strip_regions_in_type_name() {
    struct Bare;
    impl TestBuildAction for Bare {}
    assert_eq!(Bare {}.name(), "Bare");

    struct WithLifeTime<'a>(&'a str);
    impl TestBuildAction for WithLifeTime<'_> {}
    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");

    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
    assert_eq!(
        WithMultiLifeTime("test", "test").name(),
        "WithMultiLifeTime"
    );

    struct WithGeneric<T>(T);
    impl<T> TestBuildAction for WithGeneric<T> {}
    assert_eq!(WithGeneric(3).name(), "WithGeneric");
}
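The extra `split('<')` on the main side exists because `std::any::type_name` includes generic parameters, which the test above exercises. A minimal standalone sketch of that behaviour:

```rust
fn short_name<T>() -> &'static str {
    // Same splitting as the main-side name(): drop the module path, then drop
    // any generic parameter list such as "<i32>".
    std::any::type_name::<T>()
        .split("::")
        .last()
        .unwrap()
        .split('<')
        .next()
        .unwrap()
}

#[allow(dead_code)]
struct WithGeneric<T>(T);

fn main() {
    // type_name::<WithGeneric<i32>>() is something like "demo::WithGeneric<i32>".
    assert_eq!(short_name::<WithGeneric<i32>>(), "WithGeneric");
}
```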

@@ -67,7 +67,7 @@ impl Platform {
}

/// Append .exe to path if on Windows.
pub fn with_exe(path: &str) -> Cow<'_, str> {
pub fn with_exe(path: &str) -> Cow<str> {
    if cfg!(windows) {
        format!("{path}.exe").into()
    } else {

@@ -1,268 +0,0 @@ (file deleted on the 25.06b6 side)
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::error::Error;
use std::fs;
use std::path::Path;

use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;

#[derive(Debug)]
struct NodeRelease {
    version: String,
    files: Vec<NodeFile>,
}

#[derive(Debug)]
struct NodeFile {
    filename: String,
    url: String,
}

fn main() -> Result<(), Box<dyn Error>> {
    let release_info = fetch_node_release_info()?;
    let new_text = generate_node_archive_function(&release_info)?;
    update_node_text(&new_text)?;
    println!("Node.js archive function updated successfully!");
    Ok(())
}

fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
    let client = Client::new();

    // Get the Node.js release info
    let response = client
        .get("https://nodejs.org/dist/index.json")
        .header("User-Agent", "anki-build-updater")
        .send()?;

    let releases: Vec<Value> = response.json()?;

    // Find the latest LTS release
    let latest = releases
        .iter()
        .find(|release| {
            // LTS releases have a non-false "lts" field
            release["lts"].as_str().is_some() && release["lts"] != false
        })
        .ok_or("No LTS releases found")?;

    let version = latest["version"]
        .as_str()
        .ok_or("Version not found")?
        .to_string();

    let files = latest["files"]
        .as_array()
        .ok_or("Files array not found")?
        .iter()
        .map(|f| f.as_str().unwrap_or(""))
        .collect::<Vec<_>>();

    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
    println!("Found Node.js LTS version: {version} ({lts_name})");

    // Map platforms to their expected file keys and full filenames
    let platform_mapping = vec![
        (
            "linux-x64",
            "linux-x64",
            format!("node-{version}-linux-x64.tar.xz"),
        ),
        (
            "linux-arm64",
            "linux-arm64",
            format!("node-{version}-linux-arm64.tar.xz"),
        ),
        (
            "darwin-x64",
            "osx-x64-tar",
            format!("node-{version}-darwin-x64.tar.xz"),
        ),
        (
            "darwin-arm64",
            "osx-arm64-tar",
            format!("node-{version}-darwin-arm64.tar.xz"),
        ),
        (
            "win-x64",
            "win-x64-zip",
            format!("node-{version}-win-x64.zip"),
        ),
        (
            "win-arm64",
            "win-arm64-zip",
            format!("node-{version}-win-arm64.zip"),
        ),
    ];

    let mut node_files = Vec::new();

    for (platform, file_key, filename) in platform_mapping {
        // Check if this file exists in the release
        if files.contains(&file_key) {
            let url = format!("https://nodejs.org/dist/{version}/{filename}");
            node_files.push(NodeFile {
                filename: filename.clone(),
                url,
            });
            println!("Found file for {platform}: {filename} (key: {file_key})");
        } else {
            return Err(
                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
            );
        }
    }

    Ok(NodeRelease {
        version,
        files: node_files,
    })
}

fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    // Fetch the SHASUMS256.txt file once
    println!("Fetching SHA256 checksums...");
    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
    let shasums_response = client
        .get(&shasums_url)
        .header("User-Agent", "anki-build-updater")
        .send()?;
    let shasums_text = shasums_response.text()?;

    // Create a mapping from filename patterns to platform names - using the exact
    // patterns we stored in files
    let platform_mapping = vec![
        ("linux-x64.tar.xz", "LinuxX64"),
        ("linux-arm64.tar.xz", "LinuxArm"),
        ("darwin-x64.tar.xz", "MacX64"),
        ("darwin-arm64.tar.xz", "MacArm"),
        ("win-x64.zip", "WindowsX64"),
        ("win-arm64.zip", "WindowsArm"),
    ];

    let mut platform_blocks = Vec::new();

    for (file_pattern, platform_name) in platform_mapping {
        // Find the file that ends with this pattern
        if let Some(file) = release
            .files
            .iter()
            .find(|f| f.filename.ends_with(file_pattern))
        {
            // Find the SHA256 for this file
            let sha256 = shasums_text
                .lines()
                .find(|line| line.contains(&file.filename))
                .and_then(|line| line.split_whitespace().next())
                .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;

            println!(
                "Found SHA256 for {}: {} => {}",
                platform_name, file.filename, sha256
            );

            let block = format!(
                "        Platform::{} => OnlineArchive {{\n            url: \"{}\",\n            sha256: \"{}\",\n        }},",
                platform_name, file.url, sha256
            );
            platform_blocks.push(block);
        } else {
            return Err(format!(
                "File not found for platform {platform_name}: no file ending with {file_pattern}"
            )
            .into());
        }
    }

    let function = format!(
        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
        platform_blocks.join("\n")
    );

    Ok(function)
}

fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
    let node_rs_content = read_node_rs()?;

    // Regex to match the entire node_archive function with proper multiline
    // matching
    let re = Regex::new(
        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
    )?;

    let updated_content = re.replace(&node_rs_content, new_function);

    write_node_rs(&updated_content)?;
    Ok(())
}

fn read_node_rs() -> Result<String, Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    Ok(fs::read_to_string(path)?)
}

fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    fs::write(path, content)?;
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_regex_replacement() {
        let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
            sha256: "old_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
            sha256: "old_hash",
        },
    }
}

More code here"#;

        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
            sha256: "new_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
            sha256: "new_hash",
        },
    }
}"#;

        let re = Regex::new(
            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
        ).unwrap();

        let result = re.replace(sample_content, new_function);
        assert!(result.contains("v21.0.0"));
        assert!(result.contains("new_hash"));
        assert!(!result.contains("old_hash"));
        assert!(result.contains("Some other code"));
        assert!(result.contains("More code here"));
    }
}
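The deleted tool relies on the regex crate's `(?s)` flag so that `.*?` can span the multi-line body of `node_archive`. A minimal sketch of that flag in isolation (assuming the `regex` crate):

```rust
use regex::Regex;

fn main() {
    // Without (?s), `.` stops at newlines and this pattern would not match.
    let re = Regex::new(r"(?s)fn start \{.*?\}").unwrap();
    assert!(re.is_match("fn start {\n    body\n}"));
}
```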

@@ -72,11 +72,12 @@ fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
        "MacArm" => continue, // Skip MacArm since it's handled with MacX64
        "WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
        "WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
        _ => &format!("Platform::{platform}"),
        _ => &format!("Platform::{}", platform),
    };

    match_blocks.push(format!(
        "        {match_pattern} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
        "        {} => {{\n            OnlineArchive {{\n                url: \"{}\",\n                sha256: \"{}\",\n            }}\n        }}",
        match_pattern, download_url, sha256
    ));
}

@@ -53,7 +53,7 @@ fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
    // Find the corresponding .sha256 or .sha256sum asset
    let sha_asset = assets.iter().find(|a| {
        let name = a["name"].as_str().unwrap_or("");
        name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
        name == format!("{}.sha256", asset_name) || name == format!("{}.sha256sum", asset_name)
    });
    if sha_asset.is_none() {
        eprintln!("No sha256 asset found for {asset_name}");

@@ -71,7 +71,8 @@ fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
    let sha256 = sha_text.split_whitespace().next().unwrap_or("");

    match_blocks.push(format!(
        "        Platform::{platform} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
        "        Platform::{} => {{\n            OnlineArchive {{\n                url: \"{}\",\n                sha256: \"{}\",\n            }}\n        }}",
        platform, download_url, sha256
    ));
}

@@ -134,7 +135,10 @@ mod tests {
    assert_eq!(
        updated_lines,
        original_lines - EXPECTED_LINES_REMOVED,
        "Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
        "Expected line count to decrease by exactly {} lines (original: {}, updated: {})",
        EXPECTED_LINES_REMOVED,
        original_lines,
        updated_lines
    );
}
}

@@ -300,7 +300,7 @@ impl BuildStatement<'_> {

    writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
    for (key, value) in self.variables.iter().sorted() {
        writeln!(buf, "  {key} = {value}").unwrap();
        writeln!(buf, "  {key} = {}", value).unwrap();
    }
    writeln!(buf).unwrap();
|
|||
let outputs = outputs.into_iter().map(|v| {
|
||||
let v = v.as_ref();
|
||||
let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
|
||||
format!("$builddir/{v}")
|
||||
format!("$builddir/{}", v)
|
||||
} else {
|
||||
v.to_owned()
|
||||
};
|
||||
|
|
|
@ -19,28 +19,28 @@ use crate::input::BuildInput;
|
|||
pub fn node_archive(platform: Platform) -> OnlineArchive {
|
||||
match platform {
|
||||
Platform::LinuxX64 => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
|
||||
sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
|
||||
sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
|
||||
},
|
||||
Platform::LinuxArm => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
|
||||
sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
|
||||
sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
|
||||
},
|
||||
Platform::MacX64 => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
|
||||
sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
|
||||
sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
|
||||
},
|
||||
Platform::MacArm => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
|
||||
sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
|
||||
sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
|
||||
},
|
||||
Platform::WindowsX64 => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
|
||||
sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
|
||||
sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
|
||||
},
|
||||
Platform::WindowsArm => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
|
||||
sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip",
|
||||
sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142",
|
||||
},
|
||||
}
|
||||
}
|
||||
|

@@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
    }
}

fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
fn with_cmd_ext(bin: &str) -> Cow<str> {
    if cfg!(windows) {
        format!("{bin}.cmd").into()
    } else {

@@ -148,7 +148,7 @@ impl BuildAction for PythonEnvironment {
    // Add --python flag to extra_args if PYTHON_BINARY is set
    let mut args = self.extra_args.to_string();
    if let Ok(python_binary) = env::var("PYTHON_BINARY") {
        args = format!("--python {python_binary} {args}");
        args = format!("--python {} {}", python_binary, args);
    }
    build.add_variable("extra_args", args);
}

@@ -193,19 +193,31 @@ impl BuildAction for PythonTypecheck {
struct PythonFormat<'a> {
    pub inputs: &'a BuildInput,
    pub check_only: bool,
    pub isort_ini: &'a BuildInput,
}

impl BuildAction for PythonFormat<'_> {
    fn command(&self) -> &str {
        "$ruff format $mode $in && $ruff check --select I --fix $in"
        "$black -t py39 -q $check --color $in && $
$isort --color --settings-path $isort_ini $check $in"
    }

    fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
        build.add_inputs("in", self.inputs);
        build.add_inputs("ruff", inputs![":pyenv:ruff"]);
        build.add_inputs("black", inputs![":pyenv:black"]);
        build.add_inputs("isort", inputs![":pyenv:isort"]);

        let hash = simple_hash(self.inputs);
        build.add_variable("mode", if self.check_only { "--check" } else { "" });
        build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}");
        build.add_inputs("isort_ini", self.isort_ini);
        build.add_variable(
            "check",
            if self.check_only {
                "--diff --check"
            } else {
                ""
            },
        );

        build.add_output_stamp(format!(
            "tests/python_format.{}.{hash}",

@@ -215,11 +227,13 @@ impl BuildAction for PythonFormat<'_> {
}

pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> {
    let isort_ini = &inputs![".isort.cfg"];
    build.add_action(
        format!("check:format:python:{group}"),
        PythonFormat {
            inputs: &inputs,
            check_only: true,
            isort_ini,
        },
    )?;

@@ -228,39 +242,34 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Resu
        PythonFormat {
            inputs: &inputs,
            check_only: false,
            isort_ini,
        },
    )?;
    Ok(())
}

pub struct RuffCheck {
pub struct PythonLint {
    pub folders: &'static [&'static str],
    pub pylint_ini: BuildInput,
    pub deps: BuildInput,
    pub check_only: bool,
}

impl BuildAction for RuffCheck {
impl BuildAction for PythonLint {
    fn command(&self) -> &str {
        "$ruff check $folders $mode"
        "$pylint --rcfile $pylint_ini -sn -j $cpus $folders"
    }

    fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
        build.add_inputs("", &self.deps);
        build.add_inputs("", inputs![".ruff.toml"]);
        build.add_inputs("ruff", inputs![":pyenv:ruff"]);
        build.add_inputs("pylint", inputs![":pyenv:pylint"]);
        build.add_inputs("pylint_ini", &self.pylint_ini);
        build.add_variable("folders", self.folders.join(" "));
        build.add_variable(
            "mode",
            if self.check_only {
                ""
            } else {
                "--fix --unsafe-fixes"
            },
        );
        // On a 16 core system, values above 10 do not improve wall clock time,
        // but waste extra cores that could be working on other tests.
        build.add_variable("cpus", num_cpus::get().min(10).to_string());

        let hash = simple_hash(&self.deps);
        let kind = if self.check_only { "check" } else { "fix" };
        build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}"));
        build.add_output_stamp(format!("tests/python_lint.{hash}"));
    }
}

@@ -30,12 +30,12 @@ impl Build {
    )
    .unwrap();
    for (key, value) in &self.variables {
        writeln!(&mut buf, "{key} = {value}").unwrap();
        writeln!(&mut buf, "{} = {}", key, value).unwrap();
    }
    buf.push('\n');

    for (key, value) in &self.pools {
        writeln!(&mut buf, "pool {key}\n  depth = {value}").unwrap();
        writeln!(&mut buf, "pool {}\n  depth = {}", key, value).unwrap();
    }
    buf.push('\n');

@@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String {
    let mut digest = sha2::Sha256::new();
    digest.update(data);
    let result = digest.finalize();
    format!("{result:x}")
    format!("{:x}", result)
}

enum CompressionKind {

@@ -138,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf {
        true
    };
    if create {
        println!("Switching build root to {new_target}");
        println!("Switching build root to {}", new_target);
        std::os::unix::fs::symlink(new_target, build_root).unwrap();
    }
}

@@ -32,19 +32,10 @@ pub fn setup_pyenv(args: PyenvArgs) {
        }
    }

    let mut command = Command::new(args.uv_bin);

    // remove UV_* environment variables to avoid interference
    for (key, _) in std::env::vars() {
        if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
            command.env_remove(key);
        }
    }

    run_command(
        command
        Command::new(args.uv_bin)
            .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
            .args(["sync", "--locked", "--no-config"])
            .args(["sync", "--locked"])
            .args(args.extra_args),
    );

@@ -83,7 +83,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {

pub fn run_command(command: &mut Command) {
    if let Err(err) = command.ensure_success() {
        println!("{err}");
        println!("{}", err);
        std::process::exit(1);
    }
}

@@ -28,11 +28,7 @@ pub fn setup_yarn(args: YarnArgs) {
            .arg("--ignore-scripts"),
    );
} else {
    run_command(
        Command::new(&args.yarn_bin)
            .arg("install")
            .arg("--immutable"),
    );
    run_command(Command::new(&args.yarn_bin).arg("install"));
}

std::fs::write(args.stamp, b"").unwrap();

cargo/licenses.json (7532 changed lines)
File diff suppressed because it is too large.

@@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with
./ninja format
```

## Fixing ruff/eslint/copyright header issues
## Fixing eslint/copyright header issues

```
./ninja fix

@@ -1,78 +1,35 @@
# This is a user-contributed Dockerfile. No official support is available.
# This Dockerfile uses three stages.
#   1. Compile anki (and dependencies) and build python wheels.
#   2. Create a virtual environment containing anki and its dependencies.
#   3. Create a final image that only includes anki's virtual environment and required
#      system packages.

ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive"

FROM ubuntu:24.04 AS build
# Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
    > /usr/local/bin/bazel \
    && chmod +x /usr/local/bin/bazel \
    # Bazel expects /usr/bin/python
    && ln -s /usr/local/bin/python /usr/bin/python
WORKDIR /opt/anki
ENV PYTHON_VERSION="3.13"

# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    build-essential \
    pkg-config \
    libssl-dev \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    libffi-dev \
    zlib1g-dev \
    liblzma-dev \
    ca-certificates \
    ninja-build \
    rsync \
    libglib2.0-0 \
    libgl1 \
    libx11-6 \
    libxext6 \
    libxrender1 \
    libxkbcommon0 \
    libxkbcommon-x11-0 \
    libxcb1 \
    libxcb-render0 \
    libxcb-shm0 \
    libxcb-icccm4 \
    libxcb-image0 \
    libxcb-keysyms1 \
    libxcb-randr0 \
    libxcb-shape0 \
    libxcb-xfixes0 \
    libxcb-xinerama0 \
    libxcb-xinput0 \
    libsm6 \
    libice6 \
    && rm -rf /var/lib/apt/lists/*

# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
    && ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"

RUN uv python install ${PYTHON_VERSION} --default

COPY . .

COPY . .
# Build python wheels.
RUN ./tools/build

# Install pre-compiled Anki.
FROM python:3.13-slim AS installer
FROM python:${PYTHON_VERSION}-slim as installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/out/wheels/ wheels/
COPY --from=build /opt/anki/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
    && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
    && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl

# We use another build stage here so we don't include the wheels in the final image.
FROM python:3.13-slim AS final
FROM python:${PYTHON_VERSION}-slim as final
COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.
@@ -102,9 +59,9 @@ RUN apt-get update \
    libxrender1 \
    libxtst6 \
    && rm -rf /var/lib/apt/lists/*

# Add non-root user.
RUN useradd --create-home anki
USER anki
WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"

@@ -98,6 +98,12 @@ should preferably be assigned a number between 1 and 15. If a message contains

Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated).

- Every message used in aqt or pylib must be added to the respective `.pylintrc`
  to avoid failing type checks. The unqualified protobuf message's name must be
  used, not an alias from `collection.py` for example. This should be taken into
  account when choosing a message name in order to prevent skipping typechecking
  a Python class of the same name.

### Typescript

Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers

@@ -1 +1 @@
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
Subproject commit cc56464ab6354d4f1ad87ab3cc5c071c076b662d

@@ -5,11 +5,6 @@ database-check-card-properties =
        [one] Fixed { $count } invalid card property.
        *[other] Fixed { $count } invalid card properties.
    }
database-check-card-last-review-time-empty =
    { $count ->
        [one] Added last review time to { $count } card.
        *[other] Added last review time to { $count } cards.
    }
database-check-missing-templates =
    { $count ->
        [one] Deleted { $count } card with missing template.

@@ -384,6 +384,8 @@ deck-config-which-deck = Which deck would you like to display options for?
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews =
    { $count ->
        [one] Only { $count } review was found.
@@ -392,6 +394,7 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-optimize-button = Optimize Current Preset
# Indicates that a given function or label, provided via the "text" variable, operates slowly.
deck-config-slow-suffix = { $text } (slow)
@@ -404,6 +407,7 @@ deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change
@@ -421,8 +425,6 @@ deck-config-desired-retention-tooltip =
    less frequently, and you will forget more of them. Be conservative when adjusting this - higher
    values will greatly increase your workload, and lower values can be demoralizing when you forget
    a lot of material.
deck-config-desired-retention-tooltip2 =
    The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip =
    When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
    assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@@ -464,7 +466,12 @@ deck-config-compute-optimal-weights-tooltip2 =
    By default, parameters will be calculated from the review history of all decks using the current preset. You can
    optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
    optimizing the parameters.
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-workload-factor-change = Approximate workload: {$factor}x
    (compared to {$previousDR}% desired retention)
@@ -496,10 +503,7 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation
@@ -508,14 +512,10 @@ deck-config-advanced-settings = Advanced Settings
deck-config-smooth-graph = Smooth graph
deck-config-suspend-leeches = Suspend leeches
deck-config-save-options-to-preset = Save Changes to Preset
deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
# Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card

## Messages related to the FSRS scheduler’s health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.

@@ -525,7 +525,7 @@ deck-config-health-check = Check health when optimizing
deck-config-fsrs-bad-fit-warning = Health Check:
    Your memory is difficult for FSRS to predict. Recommendations:

    - Suspend or reformulate any cards you constantly forget.
    - Suspend or reformulate leeches.
    - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
    - Understand before you memorize.

@@ -536,17 +536,6 @@ deck-config-fsrs-good-fit = Health Check:

## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
    { $days ->
        [one] A 100 day interval will become { $days } day.

@@ -48,7 +48,6 @@ importing-merge-notetypes-help =
    Warning: This will require a one-way sync, and may mark existing notes as modified.
importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
importing-new-deck-will-be-created = A new deck will be created: { $name }
importing-notes-added-from-file = Notes added from file: { $val }
importing-notes-found-in-file = Notes found in file: { $val }
importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }

@@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text
preferences-default-search-text-example = e.g. "deck:current"
preferences-default-search-text-example = eg. 'deck:current '
preferences-theme = Theme
preferences-theme-follow-system = Follow System
preferences-theme-light = Light

@@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} cards memorized
statistics-memorized = {$memorized} memorized
statistics-today-title = Today
statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards

## Retention represents your actual retention from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards
## True Retention represents your actual retention rate from past reviews, in
## comparison to the "desired retention" parameter of FSRS, which forecasts
## future retention. True Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.
@@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.

statistics-true-retention-title = Retention
statistics-true-retention-title = True Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail
@ -46,20 +46,6 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
|
|||
studying-unbury = Unbury
|
||||
studying-what-would-you-like-to-unbury = What would you like to unbury?
|
||||
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
|
||||
studying-card-studied-in-minute =
|
||||
{ $cards ->
|
||||
[one] { $cards } card
|
||||
*[other] { $cards } cards
|
||||
} studied in
|
||||
{ $minutes ->
|
||||
[one] { $minutes } minute.
|
||||
*[other] { $minutes } minutes.
|
||||
}
|
||||
studying-question-time-elapsed = Question time elapsed
|
||||
studying-answer-time-elapsed = Answer time elapsed
|
||||
|
||||
## OBSOLETE; you do not need to translate this
|
||||
|
||||
studying-card-studied-in =
|
||||
{ $count ->
|
||||
[one] { $count } card studied in
|
||||
|
@@ -70,3 +56,5 @@ studying-minute =
         [one] { $count } minute.
        *[other] { $count } minutes.
     }
+studying-question-time-elapsed = Question time elapsed
+studying-answer-time-elapsed = Answer time elapsed

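For context, each Fluent message above is exposed to Python through a generated translation helper (dashes become underscores). A sketch of how the two variants would be called, assuming the standard codegen behind aqt's tr object:

label = tr.studying_card_studied_in_minute(cards=30, minutes=2)
# 25.06b6 instead joins two single-count messages:
#   tr.studying_card_studied_in(count=30) + " " + tr.studying_minute(count=2)
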
@@ -1 +1 @@
-Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
+Subproject commit 5f9a9ceb6e8a9aade26c1ad9f1c936f5cc4d9e2a

@@ -435,7 +435,7 @@ impl TextWriter {
             item = item.trim_start_matches(' ');
         }
 
-        write!(self.buffer, "{item}")
+        write!(self.buffer, "{}", item)
     }
 
     fn write_char_into_indent(&mut self, ch: char) {

@@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option<Utf8PathBuf> {
 
 fn all_langs(lang_folder: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
     std::fs::read_dir(lang_folder)
-        .with_context(|| format!("reading {lang_folder:?}"))?
+        .with_context(|| format!("reading {:?}", lang_folder))?
         .filter_map(Result::ok)
        .map(|e| Ok(e.path().utf8()?))
        .collect()

package.json (22 changed lines)

@@ -19,8 +19,8 @@
     "@poppanator/sveltekit-svg": "^5.0.0",
     "@sqltools/formatter": "^1.2.2",
     "@sveltejs/adapter-static": "^3.0.0",
-    "@sveltejs/kit": "^2.22.2",
-    "@sveltejs/vite-plugin-svelte": "5.1",
+    "@sveltejs/kit": "^2.20.7",
+    "@sveltejs/vite-plugin-svelte": "4.0.0",
     "@types/bootstrap": "^5.0.12",
     "@types/codemirror": "^5.60.0",
     "@types/d3": "^7.0.0",

@@ -30,7 +30,7 @@
     "@types/jqueryui": "^1.12.13",
     "@types/lodash-es": "^4.17.4",
     "@types/marked": "^5.0.0",
-    "@types/node": "^22",
+    "@types/node": "^20",
     "@typescript-eslint/eslint-plugin": "^5.60.1",
     "@typescript-eslint/parser": "^5.60.1",
     "caniuse-lite": "^1.0.30001431",

@@ -48,16 +48,16 @@
     "prettier": "^3.4.2",
     "prettier-plugin-svelte": "^3.3.2",
     "sass": "<1.77",
-    "svelte": "^5.34.9",
-    "svelte-check": "^4.2.2",
-    "svelte-preprocess": "^6.0.3",
+    "svelte": "^5.17.3",
+    "svelte-check": "^3.4.4",
+    "svelte-preprocess": "^5.0.4",
     "svelte-preprocess-esbuild": "^3.0.1",
     "svgo": "^3.2.0",
     "tslib": "^2.0.3",
-    "tsx": "^4.8.1",
+    "tsx": "^3.12.0",
     "typescript": "^5.0.4",
-    "vite": "6",
-    "vitest": "^3"
+    "vite": "5.4.19",
+    "vitest": "^2"
     },
     "dependencies": {
         "@bufbuild/protobuf": "^1.2.1",

@@ -81,9 +81,7 @@
     },
     "resolutions": {
         "canvas": "npm:empty-npm-package@1.0.0",
-        "cookie": "0.7.0",
-        "devalue": "^5.3.2",
-        "vite": "6"
+        "cookie": "0.7.0"
     },
     "browserslist": [
         "defaults",

@@ -51,7 +51,6 @@ message Card {
   optional FsrsMemoryState memory_state = 20;
   optional float desired_retention = 21;
   optional float decay = 22;
-  optional int64 last_review_time_secs = 23;
   string custom_data = 19;
 }
 

@@ -20,7 +20,6 @@ service CollectionService {
   rpc LatestProgress(generic.Empty) returns (Progress);
   rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
   rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
-  rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse);
 }
 
 // Implicitly includes any of the above methods that are not listed in the

@@ -164,7 +163,3 @@ message CreateBackupRequest {
   bool force = 2;
   bool wait_for_completion = 3;
 }
-
-message GetCustomColoursResponse {
-  repeated string colours = 1;
-}

@@ -56,7 +56,6 @@ message ConfigKey {
     RENDER_LATEX = 25;
     LOAD_BALANCER_ENABLED = 26;
     FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
-    FSRS_LEGACY_EVALUATE = 28;
   }
   enum String {
     SET_DUE_BROWSER = 0;

@@ -40,10 +40,12 @@ message DeckConfigId {
 message GetRetentionWorkloadRequest {
   repeated float w = 1;
   string search = 2;
+  float before = 3;
+  float after = 4;
 }
 
 message GetRetentionWorkloadResponse {
-  map<uint32, float> costs = 1;
+  float factor = 1;
 }
 
 message GetIgnoredBeforeCountRequest {

@@ -217,8 +219,6 @@ message DeckConfigsForUpdate {
     bool review_today_active = 5;
     // Whether new_today applies to today or a past day.
     bool new_today_active = 6;
-    // Deck-specific desired retention override
-    optional float desired_retention = 7;
   }
   string name = 1;
   int64 config_id = 2;

@@ -236,7 +236,6 @@ message DeckConfigsForUpdate {
   bool new_cards_ignore_review_limit = 7;
   bool fsrs = 8;
   bool fsrs_health_check = 11;
-  bool fsrs_legacy_evaluate = 12;
   bool apply_all_parent_limits = 9;
   uint32 days_since_last_fsrs_optimize = 10;
 }

@@ -83,8 +83,6 @@ message Deck {
   optional uint32 new_limit = 7;
   DayLimit review_limit_today = 8;
   DayLimit new_limit_today = 9;
-  // Deck-specific desired retention override
-  optional float desired_retention = 10;
 
   reserved 12 to 15;
 }

@@ -27,9 +27,6 @@ service FrontendService {
   rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
   // Warns python that the deck option web view is ready to receive requests.
   rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
-
-  // Save colour picker's custom colour palette
-  rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
 }
 
 service BackendFrontendService {}

@@ -176,12 +176,9 @@ message CsvMetadata {
   // to determine the number of columns.
   repeated string column_labels = 5;
   oneof deck {
-    // id of an existing deck
     int64 deck_id = 6;
     // One-based. 0 means n/a.
     uint32 deck_column = 7;
-    // name of new deck to be created
-    string deck_name = 17;
   }
   oneof notetype {
     // One notetype for all rows with given column mapping.

@@ -59,7 +59,7 @@ message AddNoteRequest {
 }
 
 message AddNoteResponse {
-  collection.OpChangesWithCount changes = 1;
+  collection.OpChanges changes = 1;
   int64 note_id = 2;
 }
 

@@ -55,11 +55,7 @@ service SchedulerService {
       returns (ComputeOptimalRetentionResponse);
   rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
       returns (SimulateFsrsReviewResponse);
-  rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
-      returns (SimulateFsrsWorkloadResponse);
   rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
-  rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
-      returns (EvaluateParamsResponse);
   rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
   // The number of days the calculated interval was fuzzed by on the previous
   // review (if any). Utilized by the FSRS add-on.

@@ -406,9 +402,31 @@ message SimulateFsrsReviewRequest {
   repeated float easy_days_percentages = 10;
   deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
   optional uint32 suspend_after_lapse_count = 12;
-  float historical_retention = 13;
-  uint32 learning_step_count = 14;
-  uint32 relearning_step_count = 15;
+  // For CMRR
+  message CMRRTarget {
+    message Memorized {
+      float loss_aversion = 1;
+    };
+
+    message Stability {};
+
+    message FutureMemorized {
+      int32 days = 1;
+    };
+
+    message AverageFutureMemorized {
+      int32 days = 1;
+    };
+
+    oneof kind {
+      Memorized memorized = 1;
+      Stability stability = 2;
+      FutureMemorized future_memorized = 3;
+      AverageFutureMemorized average_future_memorized = 4;
+    };
+  };
+
+  optional CMRRTarget target = 13;
 }
 
 message SimulateFsrsReviewResponse {

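A sketch of how the 25.06b6 oneof could be populated from Python, assuming the usual protobuf codegen in anki.scheduler_pb2; assigning into one member of kind clears the others:

from anki import scheduler_pb2

req = scheduler_pb2.SimulateFsrsReviewRequest()
# Optimise retention for cards memorized, weighting forgotten cards 2.5x:
req.target.memorized.loss_aversion = 2.5
# Choosing a different target replaces the previous oneof member:
req.target.future_memorized.days = 365
assert req.target.WhichOneof("kind") == "future_memorized"
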
@@ -418,12 +436,6 @@ message SimulateFsrsReviewResponse {
   repeated float daily_time_cost = 4;
 }
 
-message SimulateFsrsWorkloadResponse {
-  map<uint32, float> cost = 1;
-  map<uint32, float> memorized = 2;
-  map<uint32, uint32> review_count = 3;
-}
-
 message ComputeOptimalRetentionResponse {
   float optimal_retention = 1;
 }

@@ -455,12 +467,6 @@ message EvaluateParamsRequest {
   uint32 num_of_relearning_steps = 3;
 }
 
-message EvaluateParamsLegacyRequest {
-  repeated float params = 1;
-  string search = 2;
-  int64 ignore_revlogs_before_ms = 3;
-}
-
 message EvaluateParamsResponse {
   float log_loss = 1;
   float rmse_bins = 2;

@@ -74,15 +74,10 @@ message SearchNode {
     repeated SearchNode nodes = 1;
     Joiner joiner = 2;
   }
-  enum FieldSearchMode {
-    FIELD_SEARCH_MODE_NORMAL = 0;
-    FIELD_SEARCH_MODE_REGEX = 1;
-    FIELD_SEARCH_MODE_NOCOMBINING = 2;
-  }
   message Field {
     string field_name = 1;
     string text = 2;
-    FieldSearchMode mode = 3;
+    bool is_re = 3;
   }
 
   oneof filter {

|
|||
|
||||
# the following comment is required to suppress a warning that only shows up
|
||||
# when there are other pylint failures
|
||||
# pylint: disable=c-extension-no-member
|
||||
if _rsbridge.buildhash() != anki.buildinfo.buildhash:
|
||||
raise Exception(
|
||||
f"""rsbridge and anki build hashes do not match:
|
||||
|
@@ -163,7 +164,7 @@ class RustBackend(RustBackendGenerated):
         finally:
             elapsed = time.time() - start
             if current_thread() is main_thread() and elapsed > 0.2:
-                print(f"blocked main thread for {int(elapsed * 1000)}ms:")
+                print(f"blocked main thread for {int(elapsed*1000)}ms:")
                 print("".join(traceback.format_stack()))
 
         err = backend_pb2.BackendError()

@@ -246,7 +247,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
         return BackendError(err.message, help_page, context, backtrace)
 
     elif val == kind.SEARCH_ERROR:
-        return SearchError(err.message, help_page, context, backtrace)
+        return SearchError(markdown(err.message), help_page, context, backtrace)
 
     elif val == kind.UNDO_EMPTY:
         return UndoEmpty(err.message, help_page, context, backtrace)

@@ -7,7 +7,7 @@ import pprint
 import time
 from typing import NewType
 
-import anki
+import anki  # pylint: disable=unused-import
 import anki.collection
 import anki.decks
 import anki.notes

@@ -49,7 +49,6 @@ class Card(DeprecatedNamesMixin):
     memory_state: FSRSMemoryState | None
     desired_retention: float | None
     decay: float | None
-    last_review_time: int | None
 
     def __init__(
         self,

@@ -104,11 +103,6 @@ class Card(DeprecatedNamesMixin):
             card.desired_retention if card.HasField("desired_retention") else None
         )
         self.decay = card.decay if card.HasField("decay") else None
-        self.last_review_time = (
-            card.last_review_time_secs
-            if card.HasField("last_review_time_secs")
-            else None
-        )
 
     def _to_backend_card(self) -> cards_pb2.Card:
         # mtime & usn are set by backend

@@ -133,7 +127,6 @@ class Card(DeprecatedNamesMixin):
             memory_state=self.memory_state,
             desired_retention=self.desired_retention,
             decay=self.decay,
-            last_review_time_secs=self.last_review_time,
         )
 
     @deprecated(info="please use col.update_card()")

@@ -158,7 +158,7 @@ class Collection(DeprecatedNamesMixin):
         self.tags = TagManager(self)
         self.conf = ConfigManager(self)
         self._load_scheduler()
-        self._startReps = 0
+        self._startReps = 0  # pylint: disable=invalid-name
 
     def name(self) -> Any:
         return os.path.splitext(os.path.basename(self.path))[0]

@@ -511,7 +511,9 @@ class Collection(DeprecatedNamesMixin):
     # Utils
     ##########################################################################
 
-    def nextID(self, type: str, inc: bool = True) -> Any:
+    def nextID(  # pylint: disable=invalid-name
+        self, type: str, inc: bool = True
+    ) -> Any:
         type = f"next{type.capitalize()}"
         id = self.conf.get(type, 1)
         if inc:

@@ -528,7 +530,7 @@ class Collection(DeprecatedNamesMixin):
     def new_note(self, notetype: NotetypeDict) -> Note:
         return Note(self, notetype)
 
-    def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount:
+    def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
         hooks.note_will_be_added(self, note, deck_id)
         out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
         note.id = NoteId(out.note_id)

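The return-type change is visible to add-ons. A sketch, assuming a loaded collection col and a prepared note:

from anki.decks import DeckId

changes = col.add_note(note, DeckId(1))
# main: OpChangesWithCount; .count is the number of cards generated,
# which AddCards uses for its tooltip further down this diff.
print(changes.count)
# 25.06b6: plain OpChanges, so no per-card count is available.
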
@@ -847,6 +849,7 @@ class Collection(DeprecatedNamesMixin):
         )
 
     def _pb_search_separator(self, operator: SearchJoiner) -> SearchNode.Group.Joiner.V:
+        # pylint: disable=no-member
         if operator == "AND":
             return SearchNode.Group.Joiner.AND
         else:

@@ -864,9 +867,7 @@ class Collection(DeprecatedNamesMixin):
                 return column
         return None
 
-    def browser_row_for_id(
-        self, id_: int
-    ) -> tuple[
+    def browser_row_for_id(self, id_: int) -> tuple[
         Generator[tuple[str, bool, BrowserRow.Cell.TextElideMode.V], None, None],
         BrowserRow.Color.V,
         str,

@@ -1211,6 +1212,8 @@ class Collection(DeprecatedNamesMixin):
     # the count on things like edits, which we probably could do by checking
     # the previous state in moveToState.
 
+    # pylint: disable=invalid-name
+
     def startTimebox(self) -> None:
         self._startTime = time.time()
         self._startReps = self.sched.reps

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -350,7 +351,7 @@ class AnkiPackageExporter(AnkiExporter):
         colfile = path.replace(".apkg", ".anki2")
         AnkiExporter.exportInto(self, colfile)
         # prevent older clients from accessing
-
+        # pylint: disable=unreachable
         self._addDummyCollection(z)
         z.write(colfile, "collection.anki21")
 

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -175,8 +175,8 @@ class MnemoFact:
     def fact_view(self) -> type[MnemoFactView]:
         try:
             fact_view = self.cards[0].fact_view_id
-        except IndexError:
-            return FrontOnly
+        except IndexError as err:
+            raise Exception(f"Fact {id} has no cards") from err
 
         if fact_view.startswith("1.") or fact_view.startswith("1::"):
             return FrontOnly

@@ -187,7 +187,7 @@ class MnemoFact:
         elif fact_view.startswith("5.1"):
             return Cloze
 
-        raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")
+        raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
 
     def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
         return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 """
 Tools for extending Anki.

@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-
+# pylint: disable=invalid-name
 from __future__ import annotations
 
 import os

@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-
+# pylint: disable=invalid-name
 from __future__ import annotations
 
 import json

@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-
+# pylint: disable=invalid-name
 from __future__ import annotations
 
 from typing import Any

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -143,6 +144,7 @@ class TextImporter(NoteImporter):
         self.close()
         zuper = super()
         if hasattr(zuper, "__del__"):
+            # pylint: disable=no-member
             zuper.__del__(self)  # type: ignore
 
     def noteFromFields(self, fields: list[str]) -> ForeignNote:

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 import re
 import time

@@ -34,6 +35,7 @@ f._id=d._fact_id"""
     ):
         if id != curid:
             if note:
+                # pylint: disable=unsubscriptable-object
                 notes[note["_id"]] = note
             note = {"_id": _id}
             curid = id

@@ -183,6 +185,7 @@ acq_reps+ret_reps, lapses, card_type_id from cards"""
             state = dict(n=1)
 
             def repl(match):
+                # pylint: disable=cell-var-from-loop
                 # replace [...] with cloze refs
                 res = "{{c%d::%s}}" % (state["n"], match.group(1))
                 state["n"] += 1

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
 TR = anki._fluent.LegacyTranslationEnum
 FormatTimeSpan = _pb.FormatTimespanRequest
 
-
+# When adding new languages here, check lang_to_disk_lang() below
 
 langs = sorted(
     [
         ("Afrikaans", "af_ZA"),

@@ -38,7 +38,6 @@ langs = sorted(
         ("Italiano", "it_IT"),
         ("lo jbobau", "jbo_EN"),
         ("Lenga d'òc", "oc_FR"),
-        ("Қазақша", "kk_KZ"),
         ("Magyar", "hu_HU"),
         ("Nederlands", "nl_NL"),
         ("Norsk", "nb_NO"),

@@ -65,7 +64,6 @@ langs = sorted(
         ("Українська мова", "uk_UA"),
         ("Հայերեն", "hy_AM"),
         ("עִבְרִית", "he_IL"),
-        ("ייִדיש", "yi"),
         ("العربية", "ar_SA"),
         ("فارسی", "fa_IR"),
         ("ภาษาไทย", "th_TH"),

@@ -75,7 +73,6 @@ langs = sorted(
         ("ଓଡ଼ିଆ", "or_OR"),
         ("Filipino", "tl"),
         ("ئۇيغۇر", "ug"),
-        ("Oʻzbekcha", "uz_UZ"),
     ]
 )
 

@@ -106,7 +103,6 @@ compatMap = {
     "it": "it_IT",
     "ja": "ja_JP",
     "jbo": "jbo_EN",
-    "kk": "kk_KZ",
     "ko": "ko_KR",
     "la": "la_LA",
     "mn": "mn_MN",

@@ -127,9 +123,7 @@ compatMap = {
     "th": "th_TH",
     "tr": "tr_TR",
     "uk": "uk_UA",
-    "uz": "uz_UZ",
     "vi": "vi_VN",
-    "yi": "yi",
 }
 
 

@@ -163,13 +157,13 @@ def lang_to_disk_lang(lang: str) -> str:
 
 
 # the currently set interface language
-current_lang = "en"
+current_lang = "en"  # pylint: disable=invalid-name
 
 # the current Fluent translation instance. Code in pylib/ should
 # not reference this, and should use col.tr instead. The global
 # instance exists for legacy reasons, and as a convenience for the
 # Qt code.
-current_i18n: anki._backend.RustBackend | None = None
+current_i18n: anki._backend.RustBackend | None = None  # pylint: disable=invalid-name
 tr_legacyglobal = anki._backend.Translations(None)
 
 

@@ -184,7 +178,7 @@ def ngettext(single: str, plural: str, num: int) -> str:
 
 
 def set_lang(lang: str) -> None:
-    global current_lang, current_i18n
+    global current_lang, current_i18n  # pylint: disable=invalid-name
     current_lang = lang
     current_i18n = anki._backend.RustBackend(langs=[lang])
     tr_legacyglobal.backend = weakref.ref(current_i18n)

@@ -204,7 +198,9 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
         # getdefaultlocale() is deprecated since Python 3.11, but we need to keep using it as getlocale() behaves differently: https://bugs.python.org/issue38805
         with warnings.catch_warnings():
             warnings.simplefilter("ignore", DeprecationWarning)
-            (sys_lang, enc) = locale.getdefaultlocale()
+            (sys_lang, enc) = (
+                locale.getdefaultlocale()  # pylint: disable=deprecated-method
+            )
     except AttributeError:
         # this will return a different format on Windows (e.g. Italian_Italy), resulting in us falling back to en_US
         # further below

@@ -237,7 +233,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
 
 
 def is_rtl(lang: str) -> bool:
-    return lang in ("he", "ar", "fa", "ug", "yi")
+    return lang in ("he", "ar", "fa", "ug")
 
 
 # strip off unicode isolation markers from a translated string

@@ -10,7 +10,7 @@ import time
 from collections.abc import Sequence
 from typing import Any, NewType, Union
 
-import anki
+import anki  # pylint: disable=unused-import
 import anki.collection
 import anki.notes
 from anki import notetypes_pb2

@@ -419,7 +419,7 @@ and notes.mid = ? and cards.ord = ?""",
 
     # legacy API - used by unit tests and add-ons
 
-    def change(
+    def change(  # pylint: disable=invalid-name
         self,
         notetype: NotetypeDict,
         nids: list[anki.notes.NoteId],

@@ -478,6 +478,8 @@ and notes.mid = ? and cards.ord = ?""",
     # Legacy
     ##########################################################################
 
+    # pylint: disable=invalid-name
+
     @deprecated(info="use note.cloze_numbers_in_fields()")
     def _availClozeOrds(
         self, notetype: NotetypeDict, flds: str, allow_empty: bool = True

@@ -7,7 +7,7 @@ import copy
 from collections.abc import Sequence
 from typing import NewType
 
-import anki
+import anki  # pylint: disable=unused-import
 import anki.cards
 import anki.collection
 import anki.decks

@@ -4,8 +4,10 @@
 # The backend code has moved into _backend; this file exists only to avoid breaking
 # some add-ons. They should be updated to point to the correct location in the
 # future.
+#
+# pylint: disable=unused-import
+# pylint: enable=invalid-name
 
-# ruff: noqa: F401
 from anki.decks import DeckTreeNode
 from anki.errors import InvalidInput, NotFoundError
 from anki.lang import TR

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 from __future__ import annotations
 

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=invalid-name
 
 """
 The V3/2021 scheduler.

@@ -183,7 +184,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return self._interval_for_filtered_state(state.filtered)
         else:
             assert_exhaustive(kind)
-            return 0
+            return 0  # pylint: disable=unreachable
 
     def _interval_for_normal_state(
         self, normal: scheduler_pb2.SchedulingState.Normal

@@ -199,7 +200,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return normal.relearning.learning.scheduled_secs
         else:
             assert_exhaustive(kind)
-            return 0
+            return 0  # pylint: disable=unreachable
 
     def _interval_for_filtered_state(
         self, filtered: scheduler_pb2.SchedulingState.Filtered

@@ -211,7 +212,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return self._interval_for_normal_state(filtered.rescheduling.original_state)
         else:
             assert_exhaustive(kind)
-            return 0
+            return 0  # pylint: disable=unreachable
 
     def nextIvl(self, card: Card, ease: int) -> Any:
         "Don't use this - it is only required by tests, and will be moved in the future."

@@ -1,6 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+# pylint: disable=C
 
 from __future__ import annotations
 

@@ -26,7 +27,7 @@ def _legacy_card_stats(
     col: anki.collection.Collection, card_id: anki.cards.CardId, include_revlog: bool
 ) -> str:
     "A quick hack to preserve compatibility with the old HTML string API."
-    random_id = f"cardinfo-{base62(random.randint(0, 2**64 - 1))}"
+    random_id = f"cardinfo-{base62(random.randint(0, 2 ** 64 - 1))}"
     varName = random_id.replace("-", "")
     return f"""
 <div id="{random_id}"></div>

@@ -323,6 +324,7 @@ group by day order by day"""
             yaxes=[dict(min=0), dict(position="right", min=0)],
         )
         if days is not None:
+            # pylint: disable=invalid-unary-operand-type
             conf["xaxis"]["min"] = -days + 0.5
 
         def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str:

@@ -357,6 +359,7 @@ group by day order by day"""
             yaxes=[dict(min=0), dict(position="right", min=0)],
         )
         if days is not None:
+            # pylint: disable=invalid-unary-operand-type
             conf["xaxis"]["min"] = -days + 0.5
 
         def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str:

@@ -1,3 +1,5 @@
+# pylint: disable=invalid-name
+
 # from subtlepatterns.com; CC BY 4.0.
 # by Daniel Beaton
 # https://www.toptal.com/designers/subtlepatterns/fancy-deboss/

@@ -12,6 +12,7 @@ from anki import notetypes_pb2
 from anki._legacy import DeprecatedNamesMixinForModule
 from anki.utils import from_json_bytes
 
+# pylint: disable=no-member
 StockNotetypeKind = notetypes_pb2.StockNotetype.Kind
 
 # add-on authors can add ("note type name", function)

@@ -16,7 +16,7 @@ import re
 from collections.abc import Collection, Sequence
 from typing import Match
 
-import anki
+import anki  # pylint: disable=unused-import
 import anki.collection
 from anki import tags_pb2
 from anki._legacy import DeprecatedNamesMixin, deprecated

@@ -24,6 +24,7 @@ from anki.dbproxy import DBProxy
 _tmpdir: str | None
 
 try:
+    # pylint: disable=c-extension-no-member
     import orjson
 
     to_json_bytes: Callable[[Any], bytes] = orjson.dumps

@@ -155,12 +156,12 @@ def field_checksum(data: str) -> int:
 # Temp files
 ##############################################################################
 
-_tmpdir = None
+_tmpdir = None  # pylint: disable=invalid-name
 
 
 def tmpdir() -> str:
     "A reusable temp folder which we clean out on each program invocation."
-    global _tmpdir
+    global _tmpdir  # pylint: disable=invalid-name
     if not _tmpdir:
 
         def cleanup() -> None:

@@ -215,6 +216,7 @@ def call(argv: list[str], wait: bool = True, **kwargs: Any) -> int:
         try:
             info.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # type: ignore
         except Exception:
+            # pylint: disable=no-member
             info.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW  # type: ignore
     else:
         info = None

@@ -280,7 +282,7 @@ def plat_desc() -> str:
     elif is_win:
         theos = f"win:{platform.win32_ver()[0]}"
     elif system == "Linux":
-        import distro  # pytype: disable=import-error
+        import distro  # pytype: disable=import-error # pylint: disable=import-error
 
         dist_id = distro.id()
         dist_version = distro.version()

@@ -7,7 +7,7 @@ dependencies = [
     "decorator",
     "markdown",
     "orjson",
-    "protobuf>=6.0,<8.0",
+    "protobuf>=4.21",
     "requests[socks]",
     # remove after we update to min python 3.11+
     "typing_extensions",

@@ -28,6 +28,6 @@ fn main() {
             .to_string();
 
         let libs_path = stdlib_path + "s";
-        println!("cargo:rustc-link-search={libs_path}");
+        println!("cargo:rustc-link-search={}", libs_path);
     }
 }

@@ -32,7 +32,6 @@ def test_find_cards():
     note = col.newNote()
     note["Front"] = "cat"
     note["Back"] = "sheep"
-    note.tags.append("conjunção größte")
     col.addNote(note)
     catCard = note.cards()[0]
     m = col.models.current()

@@ -69,8 +68,6 @@ def test_find_cards():
     col.tags.bulk_remove(col.db.list("select id from notes"), "foo")
     assert len(col.find_cards("tag:foo")) == 0
     assert len(col.find_cards("tag:bar")) == 5
-    assert len(col.find_cards("tag:conjuncao tag:groste")) == 0
-    assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1
     # text searches
     assert len(col.find_cards("cat")) == 2
     assert len(col.find_cards("cat -dog")) == 1

@@ -172,7 +169,8 @@ def test_find_cards():
     # properties
     id = col.db.scalar("select id from cards limit 1")
     col.db.execute(
-        "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 where id = ?",
+        "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 "
+        "where id = ?",
         id,
     )
     assert len(col.find_cards("prop:ivl>5")) == 1

@@ -551,10 +551,12 @@ def test_bury():
     col.addNote(note)
     c2 = note.cards()[0]
     # burying
-    col.sched.bury_cards([c.id], manual=True)
+    col.sched.bury_cards([c.id], manual=True)  # pylint: disable=unexpected-keyword-arg
     c.load()
     assert c.queue == QUEUE_TYPE_MANUALLY_BURIED
-    col.sched.bury_cards([c2.id], manual=False)
+    col.sched.bury_cards(
+        [c2.id], manual=False
+    )  # pylint: disable=unexpected-keyword-arg
     c2.load()
     assert c2.queue == QUEUE_TYPE_SIBLING_BURIED
 

@@ -15,5 +15,6 @@ with open(buildhash_file, "r", encoding="utf8") as f:
 
 with open(outpath, "w", encoding="utf8") as f:
     # if we switch to uppercase we'll need to add legacy aliases
+    f.write("# pylint: disable=invalid-name\n")
     f.write(f"version = '{version}'\n")
     f.write(f"buildhash = '{buildhash}'\n")

@@ -133,7 +133,7 @@ prefix = """\
 # This file is automatically generated; edit tools/genhooks.py instead.
 # Please import from anki.hooks instead of this file.
 
-# ruff: noqa: F401
+# pylint: disable=unused-import
 
 from __future__ import annotations
 

|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
|
@ -203,6 +204,9 @@ def write_file(path: str, hooks: list[Hook], prefix: str, suffix: str):
|
|||
|
||||
code += f"\n{suffix}"
|
||||
|
||||
# work around issue with latest black
|
||||
if sys.platform == "win32" and "HOME" in os.environ:
|
||||
os.environ["USERPROFILE"] = os.environ["HOME"]
|
||||
with open(path, "wb") as file:
|
||||
file.write(code.encode("utf8"))
|
||||
subprocess.run([sys.executable, "-m", "ruff", "format", "-q", path], check=True)
|
||||
subprocess.run([sys.executable, "-m", "black", "-q", path], check=True)
|
||||
|
|
|
@@ -7,11 +7,14 @@ classifiers = ["Private :: Do Not Upload"]
 
 [dependency-groups]
 dev = [
+    "black",
+    "isort",
     "mypy",
     "mypy-protobuf",
-    "ruff",
+    "pylint",
     "pytest",
     "PyChromeDevTools",
+    "colorama", # for isort --color
     "wheel",
     "hatchling", # for type checking hatch_build.py files
     "mock",

@@ -2,7 +2,6 @@
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 import os
 import subprocess
 
-os.environ["REPO_ROOT"] = os.path.abspath(".")
 subprocess.run(["out/pyenv/bin/sphinx-apidoc", "-o", "out/python/sphinx", "pylib", "qt"], check=True)
 subprocess.run(["out/pyenv/bin/sphinx-build", "out/python/sphinx", "out/python/sphinx/html"], check=True)

qt/.isort.cfg (new file, 5 lines)

@@ -0,0 +1,5 @@
+[settings]
+py_version=39
+profile=black
+known_first_party=anki,aqt
+extend_skip=aqt/forms,hooks_gen.py

@@ -3,7 +3,6 @@
 
 from __future__ import annotations
 
-# ruff: noqa: F401
 import atexit
 import logging
 import os

@@ -29,7 +28,7 @@ if sys.version_info[0] < 3 or sys.version_info[1] < 9:
 # ensure unicode filenames are supported
 try:
     "テスト".encode(sys.getfilesystemencoding())
-except UnicodeEncodeError:
+except UnicodeEncodeError as exc:
     print("Anki requires a UTF-8 locale.")
     print("Please Google 'how to change locale on [your Linux distro]'")
     sys.exit(1)

@@ -42,11 +41,6 @@ if "--syncserver" in sys.argv:
     # does not return
     run_sync_server()
 
-if sys.platform == "win32":
-    from win32com.shell import shell
-
-    shell.SetCurrentProcessExplicitAppUserModelID("Ankitects.Anki")
-
 import argparse
 import builtins
 import cProfile

@@ -291,6 +285,7 @@ class NativeEventFilter(QAbstractNativeEventFilter):
     def nativeEventFilter(
         self, eventType: Any, message: Any
     ) -> tuple[bool, Any | None]:
+
         if eventType == "windows_generic_MSG":
             import ctypes.wintypes
 

@@ -563,7 +558,7 @@ def run() -> None:
     print(f"Starting Anki {_version}...")
     try:
         _run()
-    except Exception:
+    except Exception as e:
         traceback.print_exc()
         QMessageBox.critical(
             None,

@@ -6,6 +6,8 @@ from __future__ import annotations
 import sys
 
 if sys.platform == "darwin":
-    from anki_mac_helper import macos_helper
+    from anki_mac_helper import (  # pylint:disable=unused-import,import-error
+        macos_helper,
+    )
 else:
     macos_helper = None

@@ -66,14 +66,13 @@ def show(mw: aqt.AnkiQt) -> QDialog:
     # WebView contents
     ######################################################################
     abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>"
-    lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®")
-    abouttext += f"<p>{lede}"
+    abouttext += f"<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}"
     abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
     abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
-    abouttext += ("Python %s Qt %s Chromium %s<br>") % (
+    abouttext += ("Python %s Qt %s PyQt %s<br>") % (
         platform.python_version(),
         qVersion(),
-        (qWebEngineChromiumVersion() or "").split(".")[0],
+        PYQT_VERSION_STR,
     )
     abouttext += (
         without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite))

@@ -224,9 +223,6 @@ def show(mw: aqt.AnkiQt) -> QDialog:
             "Mukunda Madhav Dey",
             "Adnane Taghi",
             "Anon_0000",
-            "Bilolbek Normuminov",
-            "Sagiv Marzini",
-            "Zhanibek Rassululy",
         )
     )
 

|
|||
import aqt.editor
|
||||
import aqt.forms
|
||||
from anki._legacy import deprecated
|
||||
from anki.collection import OpChanges, OpChangesWithCount, SearchNode
|
||||
from anki.collection import OpChanges, SearchNode
|
||||
from anki.decks import DeckId
|
||||
from anki.models import NotetypeId
|
||||
from anki.notes import Note, NoteFieldsCheckResult, NoteId
|
||||
|
@@ -294,13 +294,13 @@ class AddCards(QMainWindow):
 
         target_deck_id = self.deck_chooser.selected_deck_id
 
-        def on_success(changes: OpChangesWithCount) -> None:
+        def on_success(changes: OpChanges) -> None:
             # only used for detecting changed sticky fields on close
             self._last_added_note = note
 
             self.addHistory(note)
 
-            tooltip(tr.importing_cards_added(count=changes.count), period=500)
+            tooltip(tr.adding_added(), period=500)
             av_player.stop_and_clear_queue()
             self._load_new_note(sticky_fields_from=note)
             gui_hooks.add_cards_did_add_note(note)

@@ -927,6 +927,7 @@ class AddonsDialog(QDialog):
                 or self.mgr.configAction(addon.dir_name)
             )
         )
+        return
 
     def _onAddonItemSelected(self, row_int: int) -> None:
         try:

@@ -1456,9 +1457,7 @@ class ChooseAddonsToUpdateDialog(QDialog):
         layout.addWidget(addons_list_widget)
         self.addons_list_widget = addons_list_widget
 
-        button_box = QDialogButtonBox(
-            QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel
-        )  # type: ignore
+        button_box = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel)  # type: ignore
         qconnect(
             button_box.button(QDialogButtonBox.StandardButton.Ok).clicked, self.accept
         )

@@ -36,6 +36,7 @@ def ankihub_login(
     username: str = "",
     password: str = "",
 ) -> None:
+
     def on_future_done(fut: Future[str], username: str, password: str) -> None:
         try:
             token = fut.result()

@@ -72,6 +73,7 @@ def ankihub_logout(
     on_success: Callable[[], None],
     token: str,
 ) -> None:
+
     def logout() -> None:
         mw.pm.set_ankihub_username(None)
         mw.pm.set_ankihub_token(None)

@@ -2,7 +2,6 @@
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 from __future__ import annotations
 
-# ruff: noqa: F401
 import sys
 
 import aqt

@@ -10,8 +10,6 @@ import re
 from collections.abc import Callable, Sequence
 from typing import Any, cast
 
-from markdown import markdown
-
 import aqt
 import aqt.browser
 import aqt.editor

@@ -22,7 +20,7 @@ from anki.cards import Card, CardId
 from anki.collection import Collection, Config, OpChanges, SearchNode
 from anki.consts import *
 from anki.decks import DeckId
-from anki.errors import NotFoundError, SearchError
+from anki.errors import NotFoundError
 from anki.lang import without_unicode_isolation
 from anki.models import NotetypeId
 from anki.notes import NoteId

@@ -500,8 +498,6 @@ class Browser(QMainWindow):
         text = self.current_search()
         try:
             normed = self.col.build_search_string(text)
-        except SearchError as err:
-            showWarning(markdown(str(err)))
         except Exception as err:
             showWarning(str(err))
         else:

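Taken together with the _backend hunk earlier, this moves Markdown rendering of search errors across the API boundary. A minimal sketch of the two paths (the helper name is illustrative, not part of the codebase):

from markdown import markdown

def search_error_html(message: str, backend_renders: bool) -> str:
    # 25.06b6: backend_exception_to_pylib() already wrapped err.message in
    # markdown(), so the UI shows it as-is; main keeps the message plain
    # and renders it at display time in the browser.
    return message if backend_renders else markdown(message)
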
@@ -51,7 +51,6 @@ class CardInfoDialog(QDialog):
 
     def _setup_ui(self, card_id: CardId | None) -> None:
         self.mw.garbage_collect_on_dialog_finish(self)
         self.setMinimumSize(400, 300)
         disable_help_button(self)
         restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800))
-        add_close_shortcut(self)

Some files were not shown because too many files have changed in this diff.