Mirror of https://github.com/ankitects/anki.git

Merge remote-tracking branch 'upstream' into cheesecake-alex-prod

commit 1e53505339
130 changed files with 2333 additions and 1184 deletions

@@ -16,6 +16,7 @@ if [ "$CLEAR_RUST" = "1" ]; then
     rm -rf $BUILD_ROOT/rust
 fi
 
+rm -f out/build.ninja
 ./ninja pylib qt check
 
 echo "--- Ensure libs importable"

13 .idea.dist/repo.iml Normal file
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/out/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/qt" isTestSource="false" />
+      <excludeFolder url="file://$MODULE_DIR$/extra" />
+      <excludeFolder url="file://$MODULE_DIR$/out/pyenv" />
+    </content>
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>

2 .version
@@ -1 +1 @@
-25.08b5
+25.09.2

@@ -12,8 +12,7 @@
         "command": "tools/ninja.bat",
         "args": [
             "pylib",
-            "qt",
-            "extract:win_amd64_audio"
+            "qt"
         ]
     }
 }

@@ -1 +1,2 @@
 nodeLinker: node-modules
+enableScripts: false

19 CONTRIBUTORS
@@ -49,6 +49,7 @@ Sander Santema <github.com/sandersantema/>
 Thomas Brownback <https://github.com/brownbat/>
 Andrew Gaul <andrew@gaul.org>
 kenden
+Emil Hamrin <github.com/e-hamrin>
 Nickolay Yudin <kelciour@gmail.com>
 neitrinoweb <github.com/neitrinoweb/>
 Andreas Reis <github.com/nwwt>
@@ -188,7 +189,7 @@ Christian Donat <https://github.com/cdonat2>
 Asuka Minato <https://asukaminato.eu.org>
 Dillon Baldwin <https://github.com/DillBal>
 Voczi <https://github.com/voczi>
-Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
+Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
 Themis Demetriades <themis100@outlook.com>
 Luke Bartholomew <lukesbart@icloud.com>
 Gregory Abrasaldo <degeemon@gmail.com>
@@ -238,6 +239,22 @@ Bradley Szoke <bradleyszoke@gmail.com>
 jcznk <https://github.com/jcznk>
 Thomas Rixen <thomas.rixen@student.uclouvain.be>
 Siyuan Mattuwu Yan <syan4@ualberta.ca>
+Lee Doughty <32392044+leedoughty@users.noreply.github.com>
+memchr <memchr@proton.me>
+Max Romanowski <maxr777@proton.me>
+Aldlss <ayaldlss@gmail.com>
+Hanna Nilsén <hanni614@student.liu.se>
+Elias Johansson Lara <elias.johanssonlara@gmail.com>
+Toby Penner <tobypenner01@gmail.com>
+Danilo Spillebeen <spillebeendanilo@gmail.com>
+Matbe766 <matildabergstrom01@gmail.com>
+Amanda Sternberg <mandis.sternberg@gmail.com>
+arold0 <arold0@icloud.com>
+nav1s <nav1s@proton.me>
+Ranjit Odedra <ranjitodedra.dev@gmail.com>
+Eltaurus <https://github.com/Eltaurus-Lt>
+jariji
+Francisco Esteva <fr.esteva@duocuc.cl>
 
 ********************
 

1097 Cargo.lock generated
File diff suppressed because it is too large.

@@ -51,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
 unicase = "=2.6.0" # any changes could invalidate sqlite indexes
 
 # normal
-ammonia = "4.1.0"
+ammonia = "4.1.2"
 anyhow = "1.0.98"
 async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
 async-stream = "0.3.6"
@@ -92,6 +92,7 @@ itertools = "0.14.0"
 junction = "1.2.0"
 libc = "0.2"
 libc-stdhandle = "0.1"
+locale_config = "0.3.0"
 maplit = "1.0.2"
 nom = "8.0.0"
 num-format = "0.4.4"
@@ -133,7 +134,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
 tower-http = { version = "0.6.6", features = ["trace"] }
 tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
 tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
+tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
 unic-langid = { version = "0.9.6", features = ["macros"] }
 unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"

@@ -49,6 +49,46 @@ pub trait BuildAction {
     }
 
     fn name(&self) -> &'static str {
-        std::any::type_name::<Self>().split("::").last().unwrap()
+        std::any::type_name::<Self>()
+            .split("::")
+            .last()
+            .unwrap()
+            .split('<')
+            .next()
+            .unwrap()
     }
 }
+
+#[cfg(test)]
+trait TestBuildAction {}
+
+#[cfg(test)]
+impl<T: TestBuildAction + ?Sized> BuildAction for T {
+    fn command(&self) -> &str {
+        "test"
+    }
+    fn files(&mut self, _build: &mut impl FilesHandle) {}
+}
+
+#[allow(dead_code, unused_variables)]
+#[test]
+fn should_strip_regions_in_type_name() {
+    struct Bare;
+    impl TestBuildAction for Bare {}
+    assert_eq!(Bare {}.name(), "Bare");
+
+    struct WithLifeTime<'a>(&'a str);
+    impl TestBuildAction for WithLifeTime<'_> {}
+    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
+
+    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
+    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
+    assert_eq!(
+        WithMultiLifeTime("test", "test").name(),
+        "WithMultiLifeTime"
+    );
+
+    struct WithGeneric<T>(T);
+    impl<T> TestBuildAction for WithGeneric<T> {}
+    assert_eq!(WithGeneric(3).name(), "WithGeneric");
+}
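
A minimal Python sketch of the stripping rule the reworked `name()` applies — take the last `::` segment, then drop any generic parameters (the helper below is illustrative, not Anki code):

```python
def short_name(qualified: str) -> str:
    # last path segment, with any "<...>" generics removed
    return qualified.split("::")[-1].split("<")[0]

assert short_name("module::WithLifeTime<'a>") == "WithLifeTime"
assert short_name("module::Bare") == "Bare"
```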

@@ -67,7 +67,7 @@ impl Platform {
 }
 
 /// Append .exe to path if on Windows.
-pub fn with_exe(path: &str) -> Cow<str> {
+pub fn with_exe(path: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{path}.exe").into()
     } else {

@@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
     }
 }
 
-fn with_cmd_ext(bin: &str) -> Cow<str> {
+fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{bin}.cmd").into()
     } else {

@@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) {
             .arg("--ignore-scripts"),
     );
 } else {
-    run_command(Command::new(&args.yarn_bin).arg("install"));
+    run_command(
+        Command::new(&args.yarn_bin)
+            .arg("install")
+            .arg("--immutable"),
+    );
 }
 
 std::fs::write(args.stamp, b"").unwrap();

@@ -599,6 +599,22 @@
     "name": "colored",
     "repository": "https://github.com/mackwic/colored"
   },
+  {
+    "authors": "Wim Looman <wim@nemo157.com>|Allen Bui <fairingrey@gmail.com>",
+    "description": "Adaptors for various compression algorithms.",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "compression-codecs",
+    "repository": "https://github.com/Nullus157/async-compression"
+  },
+  {
+    "authors": "Wim Looman <wim@nemo157.com>|Allen Bui <fairingrey@gmail.com>",
+    "description": "Abstractions for compression algorithms.",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "compression-core",
+    "repository": "https://github.com/Nullus157/async-compression"
+  },
   {
     "authors": "Stjepan Glavina <stjepang@gmail.com>|Taiki Endo <te316e89@gmail.com>|John Nunley <dev@notgull.net>",
     "description": "Concurrent multi-producer multi-consumer queue",
@@ -1759,6 +1775,14 @@
     "name": "http-body-util",
     "repository": "https://github.com/hyperium/http-body"
   },
+  {
+    "authors": null,
+    "description": "No-dep range header parser",
+    "license": "MIT",
+    "license_file": null,
+    "name": "http-range-header",
+    "repository": "https://github.com/MarcusGrass/parse-range-headers"
+  },
   {
     "authors": "Sean McArthur <sean@seanmonstar.com>",
     "description": "A tiny, safe, speedy, zero-copy HTTP/1.x parser.",
@@ -1943,6 +1967,14 @@
     "name": "intl_pluralrules",
     "repository": "https://github.com/zbraniecki/pluralrules"
   },
+  {
+    "authors": "quininer <quininer@live.com>",
+    "description": "The low-level `io_uring` userspace interface for Rust",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "io-uring",
+    "repository": "https://github.com/tokio-rs/io-uring"
+  },
   {
     "authors": "Kris Price <kris@krisprice.nz>",
     "description": "Provides types and useful methods for working with IPv4 and IPv6 network addresses, commonly called IP prefixes. The new `IpNet`, `Ipv4Net`, and `Ipv6Net` types build on the existing `IpAddr`, `Ipv4Addr`, and `Ipv6Addr` types already provided in Rust's standard library and align to their design to stay consistent. The module also provides useful traits that extend `Ipv4Addr` and `Ipv6Addr` with methods for `Add`, `Sub`, `BitAnd`, and `BitOr` operations. The module only uses stable feature so it is guaranteed to compile using the stable toolchain.",
@@ -2168,7 +2200,7 @@
     "repository": "https://github.com/servo/html5ever"
   },
   {
-    "authors": null,
+    "authors": "The html5ever Project Developers",
     "description": "Procedural macro for html5ever.",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
@@ -2194,7 +2226,7 @@
   {
     "authors": "Ibraheem Ahmed <ibraheem@ibraheem.ca>",
     "description": "A high performance, zero-copy URL router.",
-    "license": "MIT AND BSD-3-Clause",
+    "license": "BSD-3-Clause AND MIT",
     "license_file": null,
     "name": "matchit",
     "repository": "https://github.com/ibraheemdev/matchit"
@@ -2567,14 +2599,6 @@
     "name": "ordered-float",
     "repository": "https://github.com/reem/rust-ordered-float"
   },
-  {
-    "authors": "Daniel Salvadori <danaugrs@gmail.com>",
-    "description": "Provides a macro to simplify operator overloading.",
-    "license": "MIT",
-    "license_file": null,
-    "name": "overload",
-    "repository": "https://github.com/danaugrs/overload"
-  },
   {
     "authors": "Stjepan Glavina <stjepang@gmail.com>|The Rust Project Developers",
     "description": "Thread parking and unparking",
@@ -3040,7 +3064,7 @@
     "repository": "https://github.com/bluss/rawpointer/"
   },
   {
-    "authors": "Niko Matsakis <niko@alum.mit.edu>|Josh Stone <cuviper@gmail.com>",
+    "authors": null,
     "description": "Simple work-stealing parallelism for Rust",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
@@ -3048,7 +3072,7 @@
     "repository": "https://github.com/rayon-rs/rayon"
   },
   {
-    "authors": "Niko Matsakis <niko@alum.mit.edu>|Josh Stone <cuviper@gmail.com>",
+    "authors": null,
     "description": "Core APIs for Rayon",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
@@ -3095,28 +3119,12 @@
     "name": "regex",
     "repository": "https://github.com/rust-lang/regex"
   },
-  {
-    "authors": "Andrew Gallant <jamslam@gmail.com>",
-    "description": "Automata construction and matching using regular expressions.",
-    "license": "MIT OR Unlicense",
-    "license_file": null,
-    "name": "regex-automata",
-    "repository": "https://github.com/BurntSushi/regex-automata"
-  },
-  {
-    "authors": "The Rust Project Developers|Andrew Gallant <jamslam@gmail.com>",
-    "description": "Automata construction and matching using regular expressions.",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "regex-automata",
-    "repository": "https://github.com/rust-lang/regex/tree/master/regex-automata"
-  },
   {
     "authors": "The Rust Project Developers",
     "description": "A regular expression parser.",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
     "name": "regex-syntax",
     "repository": "https://github.com/rust-lang/regex"
   },
   {
@@ -3125,7 +3133,7 @@
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
     "name": "regex-syntax",
-    "repository": "https://github.com/rust-lang/regex/tree/master/regex-syntax"
+    "repository": "https://github.com/rust-lang/regex"
   },
   {
     "authors": "John-John Tedro <udoprog@tedro.se>",
@@ -3455,14 +3463,6 @@
     "name": "serde_repr",
     "repository": "https://github.com/dtolnay/serde-repr"
   },
-  {
-    "authors": null,
-    "description": "Serde-compatible spanned Value",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "serde_spanned",
-    "repository": "https://github.com/toml-rs/toml"
-  },
   {
     "authors": "Jacob Brown <kardeiz@gmail.com>",
     "description": "De/serialize structs with named fields as array of values",
@@ -3711,14 +3711,6 @@
     "name": "syn",
     "repository": "https://github.com/dtolnay/syn"
   },
-  {
-    "authors": "David Tolnay <dtolnay@gmail.com>",
-    "description": "Parser for Rust source code",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "syn",
-    "repository": "https://github.com/dtolnay/syn"
-  },
   {
     "authors": "Actyx AG <developer@actyx.io>",
     "description": "A tool for enlisting the compiler's help in proving the absence of concurrency",
@@ -3927,6 +3919,14 @@
     "name": "tokio-rustls",
     "repository": "https://github.com/rustls/tokio-rustls"
   },
+  {
+    "authors": "Daniel Abramov <dabramov@snapview.de>|Alexey Galakhov <agalakhov@snapview.de>",
+    "description": "Tokio binding for Tungstenite, the Lightweight stream-based WebSocket implementation",
+    "license": "MIT",
+    "license_file": null,
+    "name": "tokio-tungstenite",
+    "repository": "https://github.com/snapview/tokio-tungstenite"
+  },
   {
     "authors": "Tokio Contributors <team@tokio.rs>",
     "description": "Additional utilities for working with Tokio.",
@@ -3951,14 +3951,6 @@
     "name": "toml_edit",
     "repository": "https://github.com/toml-rs/toml"
   },
-  {
-    "authors": null,
-    "description": "A low-level interface for writing out TOML",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "toml_write",
-    "repository": "https://github.com/toml-rs/toml"
-  },
   {
     "authors": "Tower Maintainers <team@tower-rs.com>",
     "description": "Tower is a library of modular and reusable components for building robust clients and servers.",
@@ -4047,6 +4039,14 @@
     "name": "try-lock",
     "repository": "https://github.com/seanmonstar/try-lock"
   },
+  {
+    "authors": "Alexey Galakhov|Daniel Abramov",
+    "description": "Lightweight stream-based WebSocket implementation",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "tungstenite",
+    "repository": "https://github.com/snapview/tungstenite-rs"
+  },
   {
     "authors": "Jacob Brown <kardeiz@gmail.com>",
     "description": "Provides a typemap container with FxHashMap",
@@ -4154,7 +4154,7 @@
   {
     "authors": "David Tolnay <dtolnay@gmail.com>",
     "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31",
-    "license": "(MIT OR Apache-2.0) AND Unicode-3.0",
+    "license": "(Apache-2.0 OR MIT) AND Unicode-3.0",
     "license_file": null,
     "name": "unicode-ident",
     "repository": "https://github.com/dtolnay/unicode-ident"
@@ -4920,11 +4920,11 @@
     "repository": "https://github.com/LukeMathWalker/wiremock-rs"
   },
   {
-    "authors": null,
-    "description": "Runtime support for the `wit-bindgen` crate",
+    "authors": "Alex Crichton <alex@alexcrichton.com>",
+    "description": "Rust bindings generator and runtime support for WIT and the component model. Used when compiling Rust programs to the component model.",
     "license": "Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT",
     "license_file": null,
-    "name": "wit-bindgen-rt",
+    "name": "wit-bindgen",
     "repository": "https://github.com/bytecodealliance/wit-bindgen"
   },
   {

@@ -1,35 +1,78 @@
-# This Dockerfile uses three stages.
-# 1. Compile anki (and dependencies) and build python wheels.
-# 2. Create a virtual environment containing anki and its dependencies.
-# 3. Create a final image that only includes anki's virtual environment and required
-#    system packages.
+# This is a user-contributed Dockerfile. No official support is available.
 
-ARG PYTHON_VERSION="3.9"
 ARG DEBIAN_FRONTEND="noninteractive"
 
-# Build anki.
-FROM python:$PYTHON_VERSION AS build
-RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
-    > /usr/local/bin/bazel \
-    && chmod +x /usr/local/bin/bazel \
-    # Bazel expects /usr/bin/python
-    && ln -s /usr/local/bin/python /usr/bin/python
+FROM ubuntu:24.04 AS build
 WORKDIR /opt/anki
-COPY . .
-# Build python wheels.
+ENV PYTHON_VERSION="3.13"
+
+# System deps
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    curl \
+    git \
+    build-essential \
+    pkg-config \
+    libssl-dev \
+    libbz2-dev \
+    libreadline-dev \
+    libsqlite3-dev \
+    libffi-dev \
+    zlib1g-dev \
+    liblzma-dev \
+    ca-certificates \
+    ninja-build \
+    rsync \
+    libglib2.0-0 \
+    libgl1 \
+    libx11-6 \
+    libxext6 \
+    libxrender1 \
+    libxkbcommon0 \
+    libxkbcommon-x11-0 \
+    libxcb1 \
+    libxcb-render0 \
+    libxcb-shm0 \
+    libxcb-icccm4 \
+    libxcb-image0 \
+    libxcb-keysyms1 \
+    libxcb-randr0 \
+    libxcb-shape0 \
+    libxcb-xfixes0 \
+    libxcb-xinerama0 \
+    libxcb-xinput0 \
+    libsm6 \
+    libice6 \
+    && rm -rf /var/lib/apt/lists/*
+
+# install rust with rustup
+RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+ENV PATH="/root/.cargo/bin:${PATH}"
+
+# Install uv and Python 3.13 with uv
+RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
+    && ln -s /root/.local/bin/uv /usr/local/bin/uv
+ENV PATH="/root/.local/bin:${PATH}"
+
+RUN uv python install ${PYTHON_VERSION} --default
+
+COPY . .
+
+RUN ./tools/build
 
 
 # Install pre-compiled Anki.
-FROM python:${PYTHON_VERSION}-slim as installer
+FROM python:3.13-slim AS installer
 WORKDIR /opt/anki/
-COPY --from=build /opt/anki/wheels/ wheels/
+COPY --from=build /opt/anki/out/wheels/ wheels/
 # Use virtual environment.
 RUN python -m venv venv \
     && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
     && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl
 
 
 # We use another build stage here so we don't include the wheels in the final image.
-FROM python:${PYTHON_VERSION}-slim as final
+FROM python:3.13-slim AS final
 COPY --from=installer /opt/anki/venv /opt/anki/venv
 ENV PATH=/opt/anki/venv/bin:$PATH
 # Install run-time dependencies.
@@ -59,9 +102,9 @@ RUN apt-get update \
     libxrender1 \
     libxtst6 \
     && rm -rf /var/lib/apt/lists/*
 
 # Add non-root user.
 RUN useradd --create-home anki
 USER anki
 WORKDIR /work
-ENTRYPOINT ["/opt/anki/venv/bin/anki"]
-LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"
+ENTRYPOINT ["/opt/anki/venv/bin/anki"]

@@ -46,10 +46,14 @@ see and install a number of recommended extensions.
 
 ## PyCharm/IntelliJ
 
 If you decide to use PyCharm instead of VS Code, there are some things to be
 aware of.
 
-### Pylib References
+### Setting up Python environment
 
-You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a
-sources root, so it knows references to 'anki' in aqt are valid.
+To make PyCharm recognize `anki` and `aqt` imports, you need to add source paths to _Settings > Project Structure_.
+You can copy the provided .idea.dist directory to set up the paths automatically:
+
+```
+mkdir .idea && cd .idea
+ln -sf ../.idea.dist/* .
+```
+
+You also need to add a new Python interpreter under _Settings > Python > Interpreter_ pointing to the Python executable under `out/pyenv` (available after building Anki).

@@ -1 +1 @@
-Subproject commit a599715d3c27ff2eb895c749f3534ab73d83dad1
+Subproject commit ec5e4cad6242e538cacf52265243668f0de5da80

@@ -382,10 +382,8 @@ deck-config-which-deck = Which deck would you like to display options for?
 ## Messages related to the FSRS scheduler
 
 deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
-deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
+deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default values.
 deck-config-not-enough-history = Insufficient review history to perform this operation.
-deck-config-unable-to-determine-desired-retention =
-    Unable to determine a minimum recommended retention.
 deck-config-must-have-400-reviews =
     { $count ->
         [one] Only { $count } review was found.
@@ -394,7 +392,6 @@ deck-config-must-have-400-reviews =
 # Numbers that control how aggressively the FSRS algorithm schedules cards
 deck-config-weights = FSRS parameters
 deck-config-compute-optimal-weights = Optimize FSRS parameters
-deck-config-compute-minimum-recommended-retention = Minimum recommended retention
 deck-config-optimize-button = Optimize Current Preset
 # Indicates that a given function or label, provided via the "text" variable, operates slowly.
 deck-config-slow-suffix = { $text } (slow)
@@ -407,7 +404,6 @@ deck-config-historical-retention = Historical retention
 deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
 deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
 deck-config-get-params = Get Params
-deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
 deck-config-complete = { $num }% complete.
 deck-config-iterations = Iteration: { $count }...
 deck-config-reschedule-cards-on-change = Reschedule cards on change
@@ -468,12 +464,7 @@ deck-config-compute-optimal-weights-tooltip2 =
     By default, parameters will be calculated from the review history of all decks using the current preset. You can
     optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
     optimizing the parameters.
-deck-config-compute-optimal-retention-tooltip4 =
-    This tool will attempt to find the desired retention value
-    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
-    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
-    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
-    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
+
 deck-config-please-save-your-changes-first = Please save your changes first.
 deck-config-workload-factor-change = Approximate workload: {$factor}x
     (compared to {$previousDR}% desired retention)
@@ -507,6 +498,7 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
 # cards that can be recalled or retrieved on a specific date.
 deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
 deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
+deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
 deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
 deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
 deck-config-simulate = Simulate
@@ -546,6 +538,16 @@ deck-config-fsrs-good-fit = Health Check:
 deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
 # $time here is pre-formatted e.g. "10 Seconds"
 deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card
+deck-config-unable-to-determine-desired-retention =
+    Unable to determine a minimum recommended retention.
+deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
+deck-config-compute-minimum-recommended-retention = Minimum recommended retention
+deck-config-compute-optimal-retention-tooltip4 =
+    This tool will attempt to find the desired retention value
+    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
+    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
+    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
+    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
 deck-config-plotted-on-x-axis = (Plotted on the X-axis)
 deck-config-a-100-day-interval =
     { $days ->

38 ftl/core/launcher.ftl Normal file
@@ -0,0 +1,38 @@
+launcher-title = Anki Launcher
+launcher-press-enter-to-install = Press the Enter/Return key on your keyboard to install or update Anki.
+launcher-press-enter-to-start = Press enter to start Anki.
+launcher-anki-will-start-shortly = Anki will start shortly.
+launcher-you-can-close-this-window = You can close this window.
+launcher-updating-anki = Updating Anki...
+launcher-latest-anki = Install Latest Anki (default)
+launcher-choose-a-version = Choose a version
+launcher-sync-project-changes = Sync project changes
+launcher-keep-existing-version = Keep existing version ({ $current })
+launcher-revert-to-previous = Revert to previous version ({ $prev })
+launcher-allow-betas = Allow betas: { $state }
+launcher-on = on
+launcher-off = off
+launcher-cache-downloads = Cache downloads: { $state }
+launcher-download-mirror = Download mirror: { $state }
+launcher-uninstall = Uninstall Anki
+launcher-invalid-input = Invalid input. Please try again.
+launcher-latest-releases = Latest releases: { $releases }
+launcher-enter-the-version-you-want = Enter the version you want to install:
+launcher-versions-before-cant-be-installed = Versions before 2.1.50 can't be installed.
+launcher-invalid-version = Invalid version.
+launcher-unable-to-check-for-versions = Unable to check for Anki versions. Please check your internet connection.
+launcher-checking-for-updates = Checking for updates...
+launcher-uninstall-confirm = Uninstall Anki's program files? (y/n)
+launcher-uninstall-cancelled = Uninstall cancelled.
+launcher-program-files-removed = Program files removed.
+launcher-remove-all-profiles-confirm = Remove all profiles/cards? (y/n)
+launcher-user-data-removed = User data removed.
+launcher-download-mirror-options = Download mirror options:
+launcher-mirror-no-mirror = No mirror
+launcher-mirror-china = China
+launcher-mirror-disabled = Mirror disabled.
+launcher-mirror-china-enabled = China mirror enabled.
+launcher-beta-releases-enabled = Beta releases enabled.
+launcher-beta-releases-disabled = Beta releases disabled.
+launcher-download-caching-enabled = Download caching enabled.
+launcher-download-caching-disabled = Download caching disabled and cache cleared.

@@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
 studying-unbury = Unbury
 studying-what-would-you-like-to-unbury = What would you like to unbury?
 studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
+studying-card-studied-in-minute =
+    { $cards ->
+        [one] { $cards } card
+        *[other] { $cards } cards
+    } studied in
+    { $minutes ->
+        [one] { $minutes } minute.
+        *[other] { $minutes } minutes.
+    }
+studying-question-time-elapsed = Question time elapsed
+studying-answer-time-elapsed = Answer time elapsed
+
+## OBSOLETE; you do not need to translate this
+
 studying-card-studied-in =
     { $count ->
         [one] { $count } card studied in
@@ -56,5 +70,3 @@ studying-minute =
         [one] { $count } minute.
         *[other] { $count } minutes.
     }
-studying-question-time-elapsed = Question time elapsed
-studying-answer-time-elapsed = Answer time elapsed

@@ -1 +1 @@
-Subproject commit bb4207f3b8e9a7c428db282d12c75b850be532f3
+Subproject commit 0b7c530233390d73b706f012bbe7489539925c7d

@@ -82,6 +82,7 @@
     "resolutions": {
         "canvas": "npm:empty-npm-package@1.0.0",
         "cookie": "0.7.0",
+        "devalue": "^5.3.2",
         "vite": "6"
     },
     "browserslist": [

@@ -20,6 +20,7 @@ service CollectionService {
   rpc LatestProgress(generic.Empty) returns (Progress);
   rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
   rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
+  rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse);
 }
 
 // Implicitly includes any of the above methods that are not listed in the
@@ -163,3 +164,7 @@ message CreateBackupRequest {
   bool force = 2;
   bool wait_for_completion = 3;
 }
+
+message GetCustomColoursResponse {
+  repeated string colours = 1;
+}

@@ -27,6 +27,9 @@ service FrontendService {
   rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
   // Warns python that the deck option web view is ready to receive requests.
   rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
+
+  // Save colour picker's custom colour palette
+  rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
 }
 
 service BackendFrontendService {}

@@ -59,7 +59,7 @@ message AddNoteRequest {
 }
 
 message AddNoteResponse {
-  collection.OpChanges changes = 1;
+  collection.OpChangesWithCount changes = 1;
   int64 note_id = 2;
 }
 

@@ -74,10 +74,15 @@ message SearchNode {
     repeated SearchNode nodes = 1;
     Joiner joiner = 2;
   }
+  enum FieldSearchMode {
+    FIELD_SEARCH_MODE_NORMAL = 0;
+    FIELD_SEARCH_MODE_REGEX = 1;
+    FIELD_SEARCH_MODE_NOCOMBINING = 2;
+  }
   message Field {
     string field_name = 1;
     string text = 2;
-    bool is_re = 3;
+    FieldSearchMode mode = 3;
   }
 
   oneof filter {
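
The new enum keeps field number 3, which appears intended to stay wire-compatible: protobuf encodes bool and enum with the same varint wire type, so an old `is_re = true` decodes as the regex mode (an inference from the values chosen, not stated in the diff). A sketch with plain ints standing in for the generated enum:

```python
FIELD_SEARCH_MODE_NORMAL = 0
FIELD_SEARCH_MODE_REGEX = 1
FIELD_SEARCH_MODE_NOCOMBINING = 2

def mode_from_legacy_is_re(is_re: bool) -> int:
    # bool True encodes as varint 1, which the new enum reads as REGEX
    return FIELD_SEARCH_MODE_REGEX if is_re else FIELD_SEARCH_MODE_NORMAL

assert mode_from_legacy_is_re(True) == FIELD_SEARCH_MODE_REGEX
assert mode_from_legacy_is_re(False) == FIELD_SEARCH_MODE_NORMAL
```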

@@ -37,6 +37,8 @@ message CardStatsResponse {
     uint32 ease = 5;
     float taken_secs = 6;
     optional cards.FsrsMemoryState memory_state = 7;
+    // seconds
+    uint32 last_interval = 8;
   }
   repeated StatsRevlogEntry revlog = 1;
   int64 card_id = 2;

@@ -528,7 +528,7 @@ class Collection(DeprecatedNamesMixin):
     def new_note(self, notetype: NotetypeDict) -> Note:
        return Note(self, notetype)
 
-    def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
+    def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount:
         hooks.note_will_be_added(self, note, deck_id)
         out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
         note.id = NoteId(out.note_id)

@@ -175,8 +175,8 @@ class MnemoFact:
     def fact_view(self) -> type[MnemoFactView]:
         try:
             fact_view = self.cards[0].fact_view_id
-        except IndexError as err:
-            raise Exception(f"Fact {self.id} has no cards") from err
+        except IndexError:
+            return FrontOnly
 
         if fact_view.startswith("1.") or fact_view.startswith("1::"):
             return FrontOnly

@@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
 TR = anki._fluent.LegacyTranslationEnum
 FormatTimeSpan = _pb.FormatTimespanRequest
 
 # When adding new languages here, check lang_to_disk_lang() below
 langs = sorted(
     [
         ("Afrikaans", "af_ZA"),
@@ -38,6 +38,7 @@ langs = sorted(
         ("Italiano", "it_IT"),
         ("lo jbobau", "jbo_EN"),
+        ("Lenga d'òc", "oc_FR"),
         ("Қазақша", "kk_KZ"),
         ("Magyar", "hu_HU"),
         ("Nederlands", "nl_NL"),
         ("Norsk", "nb_NO"),
@@ -64,6 +65,7 @@ langs = sorted(
         ("Українська мова", "uk_UA"),
         ("Հայերեն", "hy_AM"),
         ("עִבְרִית", "he_IL"),
+        ("ייִדיש", "yi"),
         ("العربية", "ar_SA"),
         ("فارسی", "fa_IR"),
         ("ภาษาไทย", "th_TH"),
@@ -104,6 +106,7 @@ compatMap = {
     "it": "it_IT",
     "ja": "ja_JP",
+    "jbo": "jbo_EN",
     "kk": "kk_KZ",
     "ko": "ko_KR",
     "la": "la_LA",
     "mn": "mn_MN",
@@ -126,6+129,7 @@ compatMap = {
     "uk": "uk_UA",
     "uz": "uz_UZ",
     "vi": "vi_VN",
+    "yi": "yi",
 }
 
 
@@ -233,7 +237,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
 
 
 def is_rtl(lang: str) -> bool:
-    return lang in ("he", "ar", "fa", "ug")
+    return lang in ("he", "ar", "fa", "ug", "yi")
 
 
 # strip off unicode isolation markers from a translated string

@@ -32,6 +32,7 @@ def test_find_cards():
     note = col.newNote()
     note["Front"] = "cat"
     note["Back"] = "sheep"
+    note.tags.append("conjunção größte")
     col.addNote(note)
     catCard = note.cards()[0]
     m = col.models.current()
@@ -68,6 +69,8 @@
     col.tags.bulk_remove(col.db.list("select id from notes"), "foo")
     assert len(col.find_cards("tag:foo")) == 0
     assert len(col.find_cards("tag:bar")) == 5
+    assert len(col.find_cards("tag:conjuncao tag:groste")) == 0
+    assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1
     # text searches
     assert len(col.find_cards("cat")) == 2
     assert len(col.find_cards("cat -dog")) == 1

@@ -226,6 +226,7 @@ def show(mw: aqt.AnkiQt) -> QDialog:
             "Anon_0000",
             "Bilolbek Normuminov",
             "Sagiv Marzini",
+            "Zhanibek Rassululy",
         )
     )
 

@@ -8,7 +8,7 @@ from collections.abc import Callable
 import aqt.editor
 import aqt.forms
 from anki._legacy import deprecated
-from anki.collection import OpChanges, SearchNode
+from anki.collection import OpChanges, OpChangesWithCount, SearchNode
 from anki.decks import DeckId
 from anki.models import NotetypeId
 from anki.notes import Note, NoteFieldsCheckResult, NoteId
@@ -289,18 +289,22 @@ class AddCards(QMainWindow):
     def _add_current_note(self) -> None:
         note = self.editor.note
 
+        # Prevent adding a note that has already been added (e.g., from double-clicking)
+        if note.id != 0:
+            return
+
         if not self._note_can_be_added(note):
             return
 
         target_deck_id = self.deck_chooser.selected_deck_id
 
-        def on_success(changes: OpChanges) -> None:
+        def on_success(changes: OpChangesWithCount) -> None:
             # only used for detecting changed sticky fields on close
             self._last_added_note = note
 
             self.addHistory(note)
 
-            tooltip(tr.adding_added(), period=500)
+            tooltip(tr.importing_cards_added(count=changes.count), period=500)
             av_player.stop_and_clear_queue()
             self._load_new_note(sticky_fields_from=note)
             gui_hooks.add_cards_did_add_note(note)

@@ -521,7 +521,7 @@ class Browser(QMainWindow):
         self.search()
 
     def current_search(self) -> str:
-        return self._line_edit().text()
+        return self._line_edit().text().replace("\n", " ")
 
     def search(self) -> None:
         """Search triggered programmatically. Caller must have saved note first."""

@@ -13,7 +13,7 @@ import aqt.browser
 from anki.cards import Card
 from anki.collection import Config
 from anki.tags import MARKED_TAG
-from aqt import AnkiQt, gui_hooks
+from aqt import AnkiQt, gui_hooks, is_mac
 from aqt.qt import (
     QCheckBox,
     QDialog,
@@ -81,10 +81,15 @@ class Previewer(QDialog):
         qconnect(self.finished, self._on_finished)
         self.silentlyClose = True
         self.vbox = QVBoxLayout()
+        spacing = 6
         self.vbox.setContentsMargins(0, 0, 0, 0)
+        self.vbox.setSpacing(spacing)
         self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
         self.vbox.addWidget(self._web)
         self.bbox = QDialogButtonBox()
+        self.bbox.setContentsMargins(
+            spacing, spacing if is_mac else 0, spacing, spacing
+        )
         self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)
 
         gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)

@@ -80,7 +80,7 @@ class SidebarItem:
         self.search_node = search_node
         self.on_expanded = on_expanded
         self.children: list[SidebarItem] = []
-        self.tooltip: str | None = None
+        self.tooltip: str = name
         self._parent_item: SidebarItem | None = None
         self._expanded = expanded
         self._row_in_parent: int | None = None

@@ -151,6 +151,7 @@ class Editor:
         self.add_webview()
         self.setupWeb()
         self.setupShortcuts()
+        self.setupColourPalette()
         gui_hooks.editor_did_init(self)
 
     # Initial setup
@@ -349,6 +350,14 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
             keys, fn, _ = row
             QShortcut(QKeySequence(keys), self.widget, activated=fn)  # type: ignore
 
+    def setupColourPalette(self) -> None:
+        if not (colors := self.mw.col.get_config("customColorPickerPalette")):
+            return
+        for i, colour in enumerate(colors[: QColorDialog.customCount()]):
+            if not QColor.isValidColorName(colour):
+                continue
+            QColorDialog.setCustomColor(i, QColor.fromString(colour))
+
     def _addFocusCheck(self, fn: Callable) -> Callable:
         def checkFocus() -> None:
             if self.currentField is None:
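
A Qt-free sketch of the guard pattern in `setupColourPalette` — cap the saved list at the dialog's slot count and skip invalid entries. The slot count and validator below are stand-ins for `QColorDialog.customCount()` and `QColor.isValidColorName()`:

```python
CUSTOM_SLOTS = 16  # stand-in for QColorDialog.customCount()

def is_valid_colour(name: str) -> bool:
    # stand-in validator; accepts only "#rrggbb" here
    return name.startswith("#") and len(name) == 7

colours = ["#ff0000", "not-a-colour", "#00ff00"]
for i, colour in enumerate(colours[:CUSTOM_SLOTS]):
    if not is_valid_colour(colour):
        continue
    print(f"custom slot {i}: {colour}")
```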

@@ -85,11 +85,11 @@
     </item>
     <item row="2" column="2">
      <widget class="QSpinBox" name="limit">
-      <property name="maximumSize">
-       <size>
-        <width>60</width>
-        <height>16777215</height>
-       </size>
+      <property name="sizePolicy">
+       <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+        <horstretch>0</horstretch>
+        <verstretch>0</verstretch>
+       </sizepolicy>
       </property>
       <property name="minimum">
        <number>1</number>

@@ -168,11 +168,11 @@
     </item>
     <item row="1" column="1">
      <widget class="QSpinBox" name="limit_2">
-      <property name="maximumSize">
-       <size>
-        <width>60</width>
-        <height>16777215</height>
-       </size>
+      <property name="sizePolicy">
+       <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+        <horstretch>0</horstretch>
+        <verstretch>0</verstretch>
+       </sizepolicy>
       </property>
       <property name="minimum">
        <number>1</number>

@@ -47,6 +47,9 @@
       <property name="insertPolicy">
        <enum>QComboBox::NoInsert</enum>
       </property>
+      <property name="sizeAdjustPolicy">
+       <enum>QComboBox::SizeAdjustPolicy::AdjustToMinimumContentsLengthWithIcon</enum>
+      </property>
      </widget>
     </item>
    </layout>

@@ -170,13 +170,42 @@ def favicon() -> Response:
 
 def _mime_for_path(path: str) -> str:
     "Mime type for provided path/filename."
-    if path.endswith(".css"):
-        # some users may have invalid mime type in the Windows registry
-        return "text/css"
-    elif path.endswith(".js") or path.endswith(".mjs"):
-        return "application/javascript"
+    _, ext = os.path.splitext(path)
+    ext = ext.lower()
+
+    # Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely
+    # break Anki's UI. So we hard-code the most common extensions.
+    mime_types = {
+        ".css": "text/css",
+        ".js": "application/javascript",
+        ".mjs": "application/javascript",
+        ".html": "text/html",
+        ".htm": "text/html",
+        ".svg": "image/svg+xml",
+        ".png": "image/png",
+        ".jpg": "image/jpeg",
+        ".jpeg": "image/jpeg",
+        ".gif": "image/gif",
+        ".webp": "image/webp",
+        ".ico": "image/x-icon",
+        ".json": "application/json",
+        ".woff": "font/woff",
+        ".woff2": "font/woff2",
+        ".ttf": "font/ttf",
+        ".otf": "font/otf",
+        ".mp3": "audio/mpeg",
+        ".mp4": "video/mp4",
+        ".webm": "video/webm",
+        ".ogg": "audio/ogg",
+        ".pdf": "application/pdf",
+        ".txt": "text/plain",
+    }
+
+    if mime := mime_types.get(ext):
+        return mime
     else:
-        # autodetect
+        # fallback to mimetypes, which may consult the registry
        mime, _encoding = mimetypes.guess_type(path)
         return mime or "application/octet-stream"
 
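
The lookup-then-fallback shape of the new `_mime_for_path` can be exercised standalone; a minimal sketch with an abbreviated table (the full table lives in the hunk above):

```python
import mimetypes
import os

# Abbreviated copy of the hard-coded table from the hunk above.
MIME_TYPES = {".css": "text/css", ".js": "application/javascript"}

def mime_for_path(path: str) -> str:
    _, ext = os.path.splitext(path)
    if mime := MIME_TYPES.get(ext.lower()):
        return mime  # hard-coded: immune to a broken Windows registry
    mime, _encoding = mimetypes.guess_type(path)  # fallback may consult the OS
    return mime or "application/octet-stream"

print(mime_for_path("style.css"))  # text/css
print(mime_for_path("data.bin"))   # whatever mimetypes reports, else octet-stream
```
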
@@ -599,6 +628,15 @@ def deck_options_ready() -> bytes:
     return b""
 
 
+def save_custom_colours() -> bytes:
+    colors = [
+        QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb)
+        for i in range(QColorDialog.customCount())
+    ]
+    aqt.mw.col.set_config("customColorPickerPalette", colors)
+    return b""
+
+
 post_handler_list = [
     congrats_info,
     get_deck_configs_for_update,
@@ -614,12 +652,14 @@ post_handler_list = [
     search_in_browser,
     deck_options_require_close,
     deck_options_ready,
+    save_custom_colours,
 ]
 
 
 exposed_backend_list = [
     # CollectionService
     "latest_progress",
+    "get_custom_colours",
     # DeckService
     "get_deck_names",
     # I18nService

@@ -18,7 +18,7 @@ def add_note(
     parent: QWidget,
     note: Note,
     target_deck_id: DeckId,
-) -> CollectionOp[OpChanges]:
+) -> CollectionOp[OpChangesWithCount]:
     return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id))
 
 

@@ -260,6 +260,7 @@ class Preferences(QDialog):
             self.update_login_status()
             self.confirm_sync_after_login()
 
+        self.update_network()
         sync_login(self.mw, on_success)
 
     def sync_logout(self) -> None:

@@ -17,6 +17,7 @@ import aqt.browser
 import aqt.operations
 from anki.cards import Card, CardId
 from anki.collection import Config, OpChanges, OpChangesWithCount
+from anki.lang import with_collapsed_whitespace
 from anki.scheduler.base import ScheduleCardsAsNew
 from anki.scheduler.v3 import (
     CardAnswer,
@@ -966,11 +967,15 @@ timerStopped = false;
         elapsed = self.mw.col.timeboxReached()
         if elapsed:
             assert not isinstance(elapsed, bool)
-            part1 = tr.studying_card_studied_in(count=elapsed[1])
-            mins = int(round(elapsed[0] / 60))
-            part2 = tr.studying_minute(count=mins)
+            cards_val = elapsed[1]
+            minutes_val = int(round(elapsed[0] / 60))
+            message = with_collapsed_whitespace(
+                tr.studying_card_studied_in_minute(
+                    cards=cards_val, minutes=str(minutes_val)
+                )
+            )
             fin = tr.studying_finish()
-            diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
+            diag = askUserDialog(message, [tr.studying_continue(), fin])
             diag.setIcon(QMessageBox.Icon.Information)
             if diag.run() == fin:
                 self.mw.moveToState("deckBrowser")

@@ -32,6 +32,7 @@ from aqt._macos_helper import macos_helper
 from aqt.mpv import MPV, MPVBase, MPVCommandError
 from aqt.qt import *
 from aqt.taskman import TaskManager
+from aqt.theme import theme_manager
 from aqt.utils import (
     disable_help_button,
     restoreGeom,
@@ -630,18 +631,44 @@ class QtAudioInputRecorder(Recorder):
         self.mw = mw
         self._parent = parent
 
-        from PyQt6.QtMultimedia import QAudioFormat, QAudioSource  # type: ignore
+        from PyQt6.QtMultimedia import QAudioSource, QMediaDevices  # type: ignore
 
-        format = QAudioFormat()
-        format.setChannelCount(2)
-        format.setSampleRate(44100)
-        format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
+        # Get the default audio input device
+        device = QMediaDevices.defaultAudioInput()
 
-        source = QAudioSource(format, parent)
+        # Try to use Int16 format first (avoids conversion)
+        preferred_format = device.preferredFormat()
+        int16_format = preferred_format
+        int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
+
+        if device.isFormatSupported(int16_format):
+            # Use Int16 if supported
+            format = int16_format
+        else:
+            # Fall back to device's preferred format
+            format = preferred_format
+
+        # Create the audio source with the chosen format
+        source = QAudioSource(device, format, parent)
 
+        # Store the actual format being used
         self._format = source.format()
         self._audio_input = source
 
+    def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
+        """Convert float32 audio samples to int16 format for WAV output."""
+        import struct
+
+        float_count = len(float_buffer) // 4  # 4 bytes per float32
+        floats = struct.unpack(f"{float_count}f", float_buffer)
+
+        # Convert to int16 range, clipping and scaling in one step
+        int16_samples = [
+            max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
+        ]
+
+        return struct.pack(f"{len(int16_samples)}h", *int16_samples)
+
     def start(self, on_done: Callable[[], None]) -> None:
         self._iodevice = self._audio_input.start()
         self._buffer = bytearray()
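
The clip-and-scale step in `_convert_float_to_int16` is easy to check sample-by-sample; a small standalone sketch mirroring the list comprehension above:

```python
import struct

def float_to_int16(f: float) -> int:
    # same clip-and-scale as the recorder's list comprehension
    clipped = max(-1.0, min(1.0, f))
    return max(-32768, min(32767, int(clipped * 32767)))

assert float_to_int16(1.5) == 32767    # out-of-range input is clipped
assert float_to_int16(-2.0) == -32767
# pack like the recorder does: float32 samples in, int16 samples out
packed = struct.pack("2h", float_to_int16(0.5), float_to_int16(-0.5))
assert len(packed) == 4  # two int16 values
```
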
@@ -664,18 +691,32 @@ class QtAudioInputRecorder(Recorder):
             return
 
         def write_file() -> None:
-            # swallow the first 300ms to allow audio device to quiesce
-            wait = int(44100 * self.STARTUP_DELAY)
-            if len(self._buffer) <= wait:
-                return
-            self._buffer = self._buffer[wait:]
+            from PyQt6.QtMultimedia import QAudioFormat
 
-            # write out the wave file
+            # swallow the first 300ms to allow audio device to quiesce
+            bytes_per_frame = self._format.bytesPerFrame()
+            frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY)
+            bytes_to_skip = frames_to_skip * bytes_per_frame
+
+            if len(self._buffer) <= bytes_to_skip:
+                return
+            self._buffer = self._buffer[bytes_to_skip:]
+
+            # Check if we need to convert float samples to int16
+            if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
+                audio_data = self._convert_float_to_int16(self._buffer)
+                sample_width = 2  # int16 is 2 bytes
+            else:
+                # For integer formats, use the data as-is
+                audio_data = bytes(self._buffer)
+                sample_width = self._format.bytesPerSample()
+
+            # write out the wave file with the correct format parameters
             wf = wave.open(self.output_path, "wb")
             wf.setnchannels(self._format.channelCount())
-            wf.setsampwidth(2)
+            wf.setsampwidth(sample_width)
             wf.setframerate(self._format.sampleRate())
-            wf.writeframes(self._buffer)
+            wf.writeframes(audio_data)
             wf.close()
 
         def and_then(fut: Future) -> None:
@@ -743,7 +784,7 @@ class RecordDialog(QDialog):
     def _setup_dialog(self) -> None:
         self.setWindowTitle("Anki")
         icon = QLabel()
-        qicon = QIcon("icons:media-record.svg")
+        qicon = theme_manager.icon_from_resources("icons:media-record.svg")
         icon.setPixmap(qicon.pixmap(60, 60))
         self.label = QLabel("...")
         hbox = QHBoxLayout()
|
|||
|
|
@ -180,7 +180,7 @@ class CustomStyles:
|
|||
QPushButton {{
|
||||
margin: 1px;
|
||||
}}
|
||||
QPushButton:focus {{
|
||||
QPushButton:focus, QPushButton:default:hover {{
|
||||
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
|
||||
outline: none;
|
||||
margin: 0px;
|
||||
|
|
@@ -199,9 +199,6 @@ class CustomStyles:
             )
         };
         }}
-        QPushButton:default:hover {{
-            border-width: 2px;
-        }}
         QPushButton:pressed,
         QPushButton:checked,
         QSpinBox::up-button:pressed,

@@ -209,11 +209,20 @@ def on_full_sync_timer(mw: aqt.main.AnkiQt, label: str) -> None:
         return
     sync_progress = progress.full_sync
 
+    # If we've reached total, show the "checking" label
+    if sync_progress.transferred == sync_progress.total:
+        label = tr.sync_checking()
+
+    total = sync_progress.total
+    transferred = sync_progress.transferred
+
+    # Scale both to kilobytes with floor division
+    max_for_bar = total // 1024
+    value_for_bar = transferred // 1024
+
     mw.progress.update(
-        value=sync_progress.transferred,
-        max=sync_progress.total,
+        value=value_for_bar,
+        max=max_for_bar,
         process=False,
         label=label,
     )
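
Scaling both values by the same divisor keeps the bar's ratio intact while keeping the integers small; a quick check (the byte counts are illustrative and chosen as multiples of 1024 so the ratio is exact):

```python
total, transferred = 5_242_880, 1_048_576   # illustrative byte counts
max_for_bar = total // 1024                 # 5120
value_for_bar = transferred // 1024         # 1024
assert value_for_bar / max_for_bar == transferred / total  # ratio preserved
```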

@@ -115,7 +115,7 @@ class ThemeManager:
         # Workaround for Qt bug. First attempt was percent-escaping the chars,
         # but Qt can't handle that.
         # https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
-        path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
+        path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path)
         return path
 
     def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:

@@ -94,8 +94,15 @@ class TTSPlayer:
 
             rank -= 1
 
-        # if no preferred voices match, we fall back on language
-        # with a rank of -100
+        # if no requested voices match, use a preferred fallback voice
+        # (for example, Apple Samantha) with rank of -50
         for avail in avail_voices:
-            if avail.lang == tag.lang:
+            if avail.lang == "en_US" and avail.name.startswith("Apple_Samantha"):
+                return TTSVoiceMatch(voice=avail, rank=-50)
+
+        # if no requested or preferred voices match, we fall back on
+        # the first available voice for the language, with a rank of -100
+        for avail in avail_voices:
+            if avail.lang == tag.lang:
                 return TTSVoiceMatch(voice=avail, rank=-100)

@@ -809,7 +809,7 @@ def ensureWidgetInScreenBoundaries(widget: QWidget) -> None:
     wsize = widget.size()
     cappedWidth = min(geom.width(), wsize.width())
     cappedHeight = min(geom.height(), wsize.height())
-    if cappedWidth > wsize.width() or cappedHeight > wsize.height():
+    if cappedWidth < wsize.width() or cappedHeight < wsize.height():
         widget.resize(QSize(cappedWidth, cappedHeight))
 
     # ensure widget is inside top left
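
The one-character fix flips a comparison that previously could never be true: the capped dimensions come from `min()`, so they can never exceed the widget's current size, and the resize never ran. A minimal demonstration:

```python
geom_w, widget_w = 800, 1000        # screen narrower than the widget
capped_w = min(geom_w, widget_w)    # 800
assert not capped_w > widget_w      # old check: min() can never exceed the size
assert capped_w < widget_w          # new check: fires exactly when capping occurred
```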

@@ -8,11 +8,13 @@ publish = false
 rust-version.workspace = true
 
 [dependencies]
+anki_i18n.workspace = true
 anki_io.workspace = true
 anki_process.workspace = true
 anyhow.workspace = true
 camino.workspace = true
 dirs.workspace = true
+locale_config.workspace = true
 serde_json.workspace = true
 
 [target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]

@@ -7,4 +7,7 @@ fn main() {
         .manifest_required()
         .unwrap();
     }
+    println!("cargo:rerun-if-changed=../../out/buildhash");
+    let buildhash = std::fs::read_to_string("../../out/buildhash").unwrap_or_default();
+    println!("cargo:rustc-env=BUILDHASH={buildhash}");
 }

@@ -30,6 +30,12 @@ lipo -create \
     -output "$APP_LAUNCHER/Contents/MacOS/launcher"
 cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"
 
+# Build install_name_tool stub
+clang -arch arm64 -o "$OUTPUT_DIR/stub_arm64" stub.c
+clang -arch x86_64 -o "$OUTPUT_DIR/stub_x86_64" stub.c
+lipo -create "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64" -output "$APP_LAUNCHER/Contents/MacOS/install_name_tool"
+rm "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64"
+
 # Copy support files
 ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
 sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
@@ -40,7 +46,7 @@ cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
 
 # Codesign/bundle
 if [ -z "$NODMG" ]; then
-    for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
+    for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/install_name_tool" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
         codesign --force -vvvv -o runtime -s "Developer ID Application:" \
             --entitlements entitlements.python.xml \
             "$i"

6 qt/launcher/mac/stub.c Normal file
@@ -0,0 +1,6 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+int main(void) {
+    return 0;
+}

@@ -10,6 +10,7 @@ use std::process::Command;
 use std::time::SystemTime;
 use std::time::UNIX_EPOCH;
 
+use anki_i18n::I18n;
 use anki_io::copy_file;
 use anki_io::create_dir_all;
 use anki_io::modified_time;
@@ -31,6 +32,7 @@ use crate::platform::respawn_launcher;
 mod platform;
 
 struct State {
+    tr: I18n<anki_i18n::Launcher>,
     current_version: Option<String>,
     prerelease_marker: std::path::PathBuf,
     uv_install_root: std::path::PathBuf,
@@ -51,6 +53,8 @@ struct State {
     previous_version: Option<String>,
     resources_dir: std::path::PathBuf,
     venv_folder: std::path::PathBuf,
+    /// system Python + PyQt6 library mode
+    system_qt: bool,
 }
 
 #[derive(Debug, Clone)]
@@ -88,13 +92,24 @@ fn main() {
 }
 
 fn run() -> Result<()> {
-    let uv_install_root = dirs::data_local_dir()
-        .context("Unable to determine data_dir")?
-        .join("AnkiProgramFiles");
+    let uv_install_root = if let Ok(custom_root) = std::env::var("ANKI_LAUNCHER_VENV_ROOT") {
+        std::path::PathBuf::from(custom_root)
+    } else {
+        dirs::data_local_dir()
+            .context("Unable to determine data_dir")?
+            .join("AnkiProgramFiles")
+    };
 
     let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
 
+    let locale = locale_config::Locale::user_default().to_string();
+
     let mut state = State {
+        tr: I18n::new(&[if !locale.is_empty() {
+            locale
+        } else {
+            "en".to_owned()
+        }]),
         current_version: None,
         prerelease_marker: uv_install_root.join("prerelease"),
         uv_install_root: uv_install_root.clone(),
@@ -113,6 +128,8 @@ fn run() -> Result<()> {
        mirror_path: uv_install_root.join("mirror"),
        pyproject_modified_by_user: false, // calculated later
        previous_version: None,
        system_qt: (cfg!(unix) && !cfg!(target_os = "macos"))
            && resources_dir.join("system_qt").exists(),
        resources_dir,
        venv_folder: uv_install_root.join(".venv"),
    };
@@ -135,7 +152,9 @@ fn run() -> Result<()> {
    let sync_time = file_timestamp_secs(&state.sync_complete_marker);
    state.pyproject_modified_by_user = pyproject_time > sync_time;
    let pyproject_has_changed = state.pyproject_modified_by_user;
    if !launcher_requested && !pyproject_has_changed {
    let different_launcher = diff_launcher_was_installed(&state)?;

    if !launcher_requested && !pyproject_has_changed && !different_launcher {
        // If no launcher request and venv is already up to date, launch Anki normally
        let args: Vec<String> = std::env::args().skip(1).collect();
        let cmd = build_python_command(&state, &args)?;
@@ -152,10 +171,12 @@ fn run() -> Result<()> {
    }

    print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
    println!("\x1B[1mAnki Launcher\x1B[0m\n");
    println!("\x1B[1m{}\x1B[0m\n", state.tr.launcher_title());

    ensure_os_supported()?;

    println!("{}\n", state.tr.launcher_press_enter_to_install());

    check_versions(&mut state);

    main_menu_loop(&state)?;
@@ -170,15 +191,18 @@ fn run() -> Result<()> {
    }

    if cfg!(unix) && !cfg!(target_os = "macos") {
        println!("\nPress enter to start Anki.");
        println!("\n{}", state.tr.launcher_press_enter_to_start());
        let mut input = String::new();
        let _ = stdin().read_line(&mut input);
    } else {
        // on Windows/macOS, the user needs to close the terminal/console
        // currently, but ideas on how we can avoid this would be good!
        println!();
        println!("Anki will start shortly.");
        println!("\x1B[1mYou can close this window.\x1B[0m\n");
        println!("{}", state.tr.launcher_anki_will_start_shortly());
        println!(
            "\x1B[1m{}\x1B[0m\n",
            state.tr.launcher_you_can_close_this_window()
        );
    }

    // respawn the launcher as a disconnected subprocess for normal startup
@@ -193,8 +217,8 @@ fn extract_aqt_version(state: &State) -> Option<String> {
        return None;
    }

    let output = Command::new(&state.uv_path)
        .current_dir(&state.uv_install_root)
    let output = uv_command(state)
        .ok()?
        .env("VIRTUAL_ENV", &state.venv_folder)
        .args(["pip", "show", "aqt"])
        .output()
@@ -250,7 +274,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
    // Remove sync marker before attempting sync
    let _ = remove_file(&state.sync_complete_marker);

    println!("Updating Anki...\n");
    println!("{}\n", state.tr.launcher_updating_anki());

    let python_version_trimmed = if state.user_python_version_path.exists() {
        let python_version = read_file(&state.user_python_version_path)?;
@@ -261,52 +285,64 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
        None
    };

    let have_venv = state.venv_folder.exists();
    if cfg!(target_os = "macos") && !have_developer_tools() && !have_venv {
        println!("If you see a pop-up about 'install_name_tool', you can cancel it, and ignore the warning below.\n");
    }

    // Prepare to sync the venv
    let mut command = Command::new(&state.uv_path);
    command.current_dir(&state.uv_install_root);
    let mut command = uv_command(state)?;

    // remove UV_* environment variables to avoid interference
    for (key, _) in std::env::vars() {
        if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
            command.env_remove(key);
    if cfg!(target_os = "macos") {
        // remove CONDA_PREFIX/bin from PATH to avoid conda interference
        if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
            if let Ok(current_path) = std::env::var("PATH") {
                let conda_bin = format!("{conda_prefix}/bin");
                let filtered_paths: Vec<&str> = current_path
                    .split(':')
                    .filter(|&path| path != conda_bin)
                    .collect();
                let new_path = filtered_paths.join(":");
                command.env("PATH", new_path);
            }
        }
        // put our fake install_name_tool at the top of the path to override
        // potential conflicts
        if let Ok(current_path) = std::env::var("PATH") {
            let exe_dir = std::env::current_exe()
                .ok()
                .and_then(|exe| exe.parent().map(|p| p.to_path_buf()));
            if let Some(exe_dir) = exe_dir {
                let new_path = format!("{}:{}", exe_dir.display(), current_path);
                command.env("PATH", new_path);
            }
        }
    }

    // remove CONDA_PREFIX/bin from PATH to avoid conda interference
    #[cfg(target_os = "macos")]
    if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
        if let Ok(current_path) = std::env::var("PATH") {
            let conda_bin = format!("{conda_prefix}/bin");
            let filtered_paths: Vec<&str> = current_path
                .split(':')
                .filter(|&path| path != conda_bin)
                .collect();
            let new_path = filtered_paths.join(":");
            command.env("PATH", new_path);
        }
    // Create venv with system site packages if system Qt is enabled
    if state.system_qt {
        let mut venv_command = uv_command(state)?;
        venv_command.args([
            "venv",
            "--no-managed-python",
            "--system-site-packages",
            "--no-config",
        ]);
        venv_command.ensure_success()?;
    }

    command
        .env("UV_CACHE_DIR", &state.uv_cache_dir)
        .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
        .env(
            "UV_HTTP_TIMEOUT",
            std::env::var("UV_HTTP_TIMEOUT").unwrap_or_else(|_| "180".to_string()),
        .args(["sync", "--upgrade", "--managed-python", "--no-config"]);
        );

    // Add python version if .python-version file exists
    if let Some(version) = &python_version_trimmed {
        command.args(["--python", version]);
    command.args(["sync", "--upgrade", "--no-config"]);
    if !state.system_qt {
        command.arg("--managed-python");
    }

    if state.no_cache_marker.exists() {
        command.env("UV_NO_CACHE", "1");
    // Add python version if .python-version file exists (but not for system Qt)
    if let Some(version) = &python_version_trimmed {
        if !state.system_qt {
            command.args(["--python", version]);
        }
    }

    match command.ensure_success() {
@@ -353,10 +389,10 @@ fn main_menu_loop(state: &State) -> Result<()> {
            // Toggle beta prerelease file
            if state.prerelease_marker.exists() {
                let _ = remove_file(&state.prerelease_marker);
                println!("Beta releases disabled.");
                println!("{}", state.tr.launcher_beta_releases_disabled());
            } else {
                write_file(&state.prerelease_marker, "")?;
                println!("Beta releases enabled.");
                println!("{}", state.tr.launcher_beta_releases_enabled());
            }
            println!();
            continue;
@@ -365,14 +401,14 @@ fn main_menu_loop(state: &State) -> Result<()> {
            // Toggle cache disable file
            if state.no_cache_marker.exists() {
                let _ = remove_file(&state.no_cache_marker);
                println!("Download caching enabled.");
                println!("{}", state.tr.launcher_download_caching_enabled());
            } else {
                write_file(&state.no_cache_marker, "")?;
                // Delete the cache directory and everything in it
                if state.uv_cache_dir.exists() {
                    let _ = anki_io::remove_dir_all(&state.uv_cache_dir);
                }
                println!("Download caching disabled and cache cleared.");
                println!("{}", state.tr.launcher_download_caching_disabled());
            }
            println!();
            continue;
@@ -415,44 +451,62 @@ fn file_timestamp_secs(path: &std::path::Path) -> i64 {

fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
    loop {
        println!("1) Latest Anki (press Enter)");
        println!("2) Choose a version");
        println!("1) {}", state.tr.launcher_latest_anki());
        println!("2) {}", state.tr.launcher_choose_a_version());

        if let Some(current_version) = &state.current_version {
            let normalized_current = normalize_version(current_version);

            if state.pyproject_modified_by_user {
                println!("3) Sync project changes");
                println!("3) {}", state.tr.launcher_sync_project_changes());
            } else {
                println!("3) Keep existing version ({normalized_current})");
                println!(
                    "3) {}",
                    state.tr.launcher_keep_existing_version(normalized_current)
                );
            }
        }

        if let Some(prev_version) = &state.previous_version {
            if state.current_version.as_ref() != Some(prev_version) {
                let normalized_prev = normalize_version(prev_version);
                println!("4) Revert to previous version ({normalized_prev})");
                println!(
                    "4) {}",
                    state.tr.launcher_revert_to_previous(normalized_prev)
                );
            }
        }
        println!();

        let betas_enabled = state.prerelease_marker.exists();
        println!(
            "5) Allow betas: {}",
            if betas_enabled { "on" } else { "off" }
            "5) {}",
            state.tr.launcher_allow_betas(if betas_enabled {
                state.tr.launcher_on()
            } else {
                state.tr.launcher_off()
            })
        );
        let cache_enabled = !state.no_cache_marker.exists();
        println!(
            "6) Cache downloads: {}",
            if cache_enabled { "on" } else { "off" }
            "6) {}",
            state.tr.launcher_cache_downloads(if cache_enabled {
                state.tr.launcher_on()
            } else {
                state.tr.launcher_off()
            })
        );
        let mirror_enabled = is_mirror_enabled(state);
        println!(
            "7) Download mirror: {}",
            if mirror_enabled { "on" } else { "off" }
            "7) {}",
            state.tr.launcher_download_mirror(if mirror_enabled {
                state.tr.launcher_on()
            } else {
                state.tr.launcher_off()
            })
        );
        println!();
        println!("8) Uninstall");
        println!("8) {}", state.tr.launcher_uninstall());
        print!("> ");
        let _ = stdout().flush();

@@ -474,7 +528,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
                if state.current_version.is_some() {
                    MainMenuChoice::KeepExisting
                } else {
                    println!("Invalid input. Please try again.\n");
                    println!("{}\n", state.tr.launcher_invalid_input());
                    continue;
                }
            }
@@ -486,7 +540,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
                    }
                }
                println!("Invalid input. Please try again.\n");
                println!("{}\n", state.tr.launcher_invalid_input());
                continue;
            }
            "5" => MainMenuChoice::ToggleBetas,
@@ -494,7 +548,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
            "7" => MainMenuChoice::DownloadMirror,
            "8" => MainMenuChoice::Uninstall,
            _ => {
                println!("Invalid input. Please try again.");
                println!("{}\n", state.tr.launcher_invalid_input());
                continue;
            }
        });
@@ -509,9 +563,9 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
        .map(|v| v.as_str())
        .collect::<Vec<_>>()
        .join(", ");
    println!("Latest releases: {releases_str}");
    println!("{}", state.tr.launcher_latest_releases(releases_str));

    println!("Enter the version you want to install:");
    println!("{}", state.tr.launcher_enter_the_version_you_want());
    print!("> ");
    let _ = stdout().flush();

@@ -535,29 +589,38 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
            Ok(Some(version_kind))
        }
        (None, true) => {
            println!("Versions before 2.1.50 can't be installed.");
            println!("{}", state.tr.launcher_versions_before_cant_be_installed());
            Ok(None)
        }
        _ => {
            println!("Invalid version.\n");
            println!("{}\n", state.tr.launcher_invalid_version());
            Ok(None)
        }
    }
}

fn with_only_latest_patch(versions: &[String]) -> Vec<String> {
    // Only show the latest patch release for a given (major, minor)
    // Assumes versions are sorted in descending order (newest first)
    // Only show the latest patch release for a given (major, minor),
    // and exclude pre-releases if a newer major_minor exists
    let mut seen_major_minor = std::collections::HashSet::new();
    versions
        .iter()
        .filter(|v| {
            let (major, minor, _, _) = parse_version_for_filtering(v);
            let (major, minor, _, is_prerelease) = parse_version_for_filtering(v);
            if major == 2 {
                return true;
            }
            let major_minor = (major, minor);
            if seen_major_minor.contains(&major_minor) {
                false
            } else if is_prerelease
                && seen_major_minor
                    .iter()
                    .any(|&(seen_major, seen_minor)| (seen_major, seen_minor) > (major, minor))
            {
                // Exclude pre-release if a newer major_minor exists
                false
            } else {
                seen_major_minor.insert(major_minor);
                true
@@ -658,9 +721,8 @@ fn filter_and_normalize_versions(
fn fetch_versions(state: &State) -> Result<Vec<String>> {
    let versions_script = state.resources_dir.join("versions.py");

    let mut cmd = Command::new(&state.uv_path);
    cmd.current_dir(&state.uv_install_root)
        .args(["run", "--no-project", "--no-config", "--managed-python"])
    let mut cmd = uv_command(state)?;
    cmd.args(["run", "--no-project", "--no-config", "--managed-python"])
        .args(["--with", "pip-system-certs,requests[socks]"]);

    let python_version = read_file(&state.dist_python_version_path)?;
@@ -676,7 +738,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
    let output = match cmd.utf8_output() {
        Ok(output) => output,
        Err(e) => {
            print!("Unable to check for Anki versions. Please check your internet connection.\n\n");
            print!("{}\n\n", state.tr.launcher_unable_to_check_for_versions());
            return Err(e.into());
        }
    };
@@ -685,7 +747,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
}

fn get_releases(state: &State) -> Result<Releases> {
    println!("Checking for updates...");
    println!("{}", state.tr.launcher_checking_for_updates());
    let include_prereleases = state.prerelease_marker.exists();
    let all_versions = fetch_versions(state)?;
    let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
@@ -726,9 +788,20 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
        ),
    };

    // Add mirror configuration if enabled
    let final_content = if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
        format!("{updated_content}\n\n[[tool.uv.index]]\nname = \"mirror\"\nurl = \"{pypi_mirror}\"\ndefault = true\n\n[tool.uv]\npython-install-mirror = \"{python_mirror}\"\n")
    let final_content = if state.system_qt {
        format!(
            concat!(
                "{}\n\n[tool.uv]\n",
                "override-dependencies = [\n",
                "  \"pyqt6; sys_platform=='never'\",\n",
                "  \"pyqt6-qt6; sys_platform=='never'\",\n",
                "  \"pyqt6-webengine; sys_platform=='never'\",\n",
                "  \"pyqt6-webengine-qt6; sys_platform=='never'\",\n",
                "  \"pyqt6_sip; sys_platform=='never'\"\n",
                "]\n"
            ),
            updated_content
        )
    } else {
        updated_content
    };
@@ -876,7 +949,7 @@ fn get_anki_addons21_path() -> Result<std::path::PathBuf> {
}

fn handle_uninstall(state: &State) -> Result<bool> {
    println!("Uninstall Anki's program files? (y/n)");
    println!("{}", state.tr.launcher_uninstall_confirm());
    print!("> ");
    let _ = stdout().flush();
@@ -885,7 +958,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {
    let input = input.trim().to_lowercase();

    if input != "y" {
        println!("Uninstall cancelled.");
        println!("{}", state.tr.launcher_uninstall_cancelled());
        println!();
        return Ok(false);
    }
@@ -893,11 +966,11 @@ fn handle_uninstall(state: &State) -> Result<bool> {
    // Remove program files
    if state.uv_install_root.exists() {
        anki_io::remove_dir_all(&state.uv_install_root)?;
        println!("Program files removed.");
        println!("{}", state.tr.launcher_program_files_removed());
    }

    println!();
    println!("Remove all profiles/cards? (y/n)");
    println!("{}", state.tr.launcher_remove_all_profiles_confirm());
    print!("> ");
    let _ = stdout().flush();

@@ -907,7 +980,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {

    if input == "y" && state.anki_base_folder.exists() {
        anki_io::remove_dir_all(&state.anki_base_folder)?;
        println!("User data removed.");
        println!("{}", state.tr.launcher_user_data_removed());
    }

    println!();
@@ -925,12 +998,37 @@ fn handle_uninstall(state: &State) -> Result<bool> {
    Ok(true)
}

fn have_developer_tools() -> bool {
    Command::new("xcode-select")
        .args(["-p"])
        .output()
        .map(|output| output.status.success())
        .unwrap_or(false)
fn uv_command(state: &State) -> Result<Command> {
    let mut command = Command::new(&state.uv_path);
    command.current_dir(&state.uv_install_root);

    // remove UV_* environment variables to avoid interference
    for (key, _) in std::env::vars() {
        if key.starts_with("UV_") {
            command.env_remove(key);
        }
    }
    command
        .env_remove("VIRTUAL_ENV")
        .env_remove("SSLKEYLOGFILE");

    // Add mirror environment variable if enabled
    if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
        command
            .env("UV_PYTHON_INSTALL_MIRROR", &python_mirror)
            .env("UV_DEFAULT_INDEX", &pypi_mirror);
    }

    if state.no_cache_marker.exists() {
        command.env("UV_NO_CACHE", "1");
    } else {
        command.env("UV_CACHE_DIR", &state.uv_cache_dir);
    }

    // have uv use the system certstore instead of webpki-roots'
    command.env("UV_NATIVE_TLS", "1");

    Ok(command)
}

fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
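Editor's note: a hedged usage sketch of the uv_command() helper above — illustrative only, but it mirrors how fetch_versions() and the sync path call it, so every uv invocation picks up the same cache, mirror, and TLS settings:

    // Illustrative sketch, not part of the commit; `state` construction elided.
    let mut cmd = uv_command(&state)?;
    cmd.args(["pip", "list"]);
    cmd.ensure_success()?;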
@@ -955,6 +1053,7 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
    // Set UV and Python paths for the Python code
    cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str());
    cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str());
    cmd.env_remove("SSLKEYLOGFILE");

    Ok(cmd)
}
@@ -984,9 +1083,9 @@ fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {

fn show_mirror_submenu(state: &State) -> Result<()> {
    loop {
        println!("Download mirror options:");
        println!("1) No mirror");
        println!("2) China");
        println!("{}", state.tr.launcher_download_mirror_options());
        println!("1) {}", state.tr.launcher_mirror_no_mirror());
        println!("2) {}", state.tr.launcher_mirror_china());
        print!("> ");
        let _ = stdout().flush();

@@ -1000,14 +1099,14 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
                if state.mirror_path.exists() {
                    let _ = remove_file(&state.mirror_path);
                }
                println!("Mirror disabled.");
                println!("{}", state.tr.launcher_mirror_disabled());
                break;
            }
            "2" => {
                // Write China mirror URLs
                let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
                write_file(&state.mirror_path, china_mirrors)?;
                println!("China mirror enabled.");
                println!("{}", state.tr.launcher_mirror_china_enabled());
                break;
            }
            "" => {
@@ -1015,7 +1114,7 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
                break;
            }
            _ => {
                println!("Invalid input. Please try again.");
                println!("{}", state.tr.launcher_invalid_input());
                continue;
            }
        }
@@ -1023,6 +1122,20 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
    Ok(())
}

fn diff_launcher_was_installed(state: &State) -> Result<bool> {
    let launcher_version = option_env!("BUILDHASH").unwrap_or("dev").trim();
    let launcher_version_path = state.uv_install_root.join("launcher-version");
    if let Ok(content) = read_file(&launcher_version_path) {
        if let Ok(version_str) = String::from_utf8(content) {
            if version_str.trim() == launcher_version {
                return Ok(false);
            }
        }
    }
    write_file(launcher_version_path, launcher_version)?;
    Ok(true)
}

#[cfg(test)]
mod tests {
    use super::*;
@@ -62,8 +62,9 @@ pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<
pub fn relaunch_in_terminal() -> Result<()> {
    let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
    Command::new("open")
        .args(["-a", "Terminal"])
        .args(["-na", "Terminal"])
        .arg(current_exe)
        .env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
        .ensure_spawn()?;
    std::process::exit(0);
}
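Editor's note: `-na` combines `-n` (always launch a new instance) with `-a` (treat the following argument as the application), so `open` starts a fresh Terminal instance even if Terminal is already running.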
@@ -134,5 +134,8 @@ pub fn ensure_os_supported() -> Result<()> {
    #[cfg(all(unix, not(target_os = "macos")))]
    unix::ensure_glibc_supported()?;

    #[cfg(target_os = "windows")]
    windows::ensure_windows_version_supported()?;

    Ok(())
}
@@ -38,6 +38,26 @@ fn is_windows_10() -> bool {
    }
}

/// Ensures Windows 10 version 1809 or later
pub fn ensure_windows_version_supported() -> Result<()> {
    unsafe {
        let mut info = OSVERSIONINFOW {
            dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
            ..Default::default()
        };

        if RtlGetVersion(&mut info).is_err() {
            anyhow::bail!("Failed to get Windows version information");
        }

        if info.dwBuildNumber >= 17763 {
            return Ok(());
        }

        anyhow::bail!("Windows 10 version 1809 or later is required.")
    }
}

pub fn ensure_terminal_shown() -> Result<()> {
    unsafe {
        if !GetConsoleWindow().is_invalid() {
@@ -22,6 +22,7 @@ inflections.workspace = true
anki_io.workspace = true
anyhow.workspace = true
itertools.workspace = true
regex.workspace = true

[dependencies]
fluent.workspace = true
@@ -23,10 +23,10 @@ use write_strings::write_strings;

fn main() -> Result<()> {
    // generate our own requirements
    let map = get_ftl_data();
    let mut map = get_ftl_data();
    check(&map);
    let modules = get_modules(&map);
    write_strings(&map, &modules);
    let mut modules = get_modules(&map);
    write_strings(&map, &modules, "strings.rs", "All");

    typescript::write_ts_interface(&modules)?;
    python::write_py_interface(&modules)?;

@@ -41,5 +41,12 @@ fn main() -> Result<()> {
            write_file_if_changed(path, meta_json)?;
        }
    }

    // generate strings for the launcher
    map.iter_mut()
        .for_each(|(_, modules)| modules.retain(|module, _| module == "launcher"));
    modules.retain(|module| module.name == "launcher");
    write_strings(&map, &modules, "strings_launcher.rs", "Launcher");

    Ok(())
}
@@ -1,9 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

// Include auto-generated content

#![allow(clippy::all)]
#![allow(text_direction_codepoint_in_literal)]

#[derive(Clone)]
pub struct All;

// Include auto-generated content
include!(concat!(env!("OUT_DIR"), "/strings.rs"));

impl Translations for All {
    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
    const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
}
15
rslib/i18n/src/generated_launcher.rs
Normal file
@@ -0,0 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

#![allow(clippy::all)]

#[derive(Clone)]
pub struct Launcher;

// Include auto-generated content
include!(concat!(env!("OUT_DIR"), "/strings_launcher.rs"));

impl Translations for Launcher {
    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
    const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
}
@@ -2,8 +2,10 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

mod generated;
mod generated_launcher;

use std::borrow::Cow;
use std::marker::PhantomData;
use std::sync::Arc;
use std::sync::Mutex;

@@ -12,8 +14,6 @@ use fluent::FluentArgs;
use fluent::FluentResource;
use fluent::FluentValue;
use fluent_bundle::bundle::FluentBundle as FluentBundleOrig;
use generated::KEYS_BY_MODULE;
use generated::STRINGS;
use num_format::Locale;
use serde::Serialize;
use unic_langid::LanguageIdentifier;

@@ -22,6 +22,9 @@ type FluentBundle<T> = FluentBundleOrig<T, intl_memoizer::concurrent::IntlLangMe

pub use fluent::fluent_args as tr_args;

pub use crate::generated::All;
pub use crate::generated_launcher::Launcher;

pub trait Number: Into<FluentNumber> {
    fn round(self) -> Self;
}
@@ -187,20 +190,67 @@ fn get_bundle_with_extra(
    get_bundle(text, extra_text, &locales)
}

pub trait Translations {
    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>>;
    const KEYS_BY_MODULE: &[&[&str]];
}

#[derive(Clone)]
pub struct I18n {
pub struct I18n<P: Translations = All> {
    inner: Arc<Mutex<I18nInner>>,
    _translations_type: std::marker::PhantomData<P>,
}

fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
    KEYS_BY_MODULE
        .get(module_idx)
        .and_then(|translations| translations.get(translation_idx))
        .cloned()
        .unwrap_or("invalid-module-or-translation-index")
}
impl<P: Translations> I18n<P> {
    fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
        P::KEYS_BY_MODULE
            .get(module_idx)
            .and_then(|translations| translations.get(translation_idx))
            .cloned()
            .unwrap_or("invalid-module-or-translation-index")
    }

    fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
        langs
            .iter()
            .cloned()
            .map(|lang| {
                let mut buf = String::new();
                let lang_name = remapped_lang_name(&lang);
                if let Some(strings) = P::STRINGS.get(lang_name) {
                    if desired_modules.is_empty() {
                        // empty list, provide all modules
                        for value in strings.values() {
                            buf.push_str(value)
                        }
                    } else {
                        for module_name in desired_modules {
                            if let Some(text) = strings.get(module_name.as_str()) {
                                buf.push_str(text);
                            }
                        }
                    }
                }
                buf
            })
            .collect()
    }

    /// This temporarily behaves like the older code; in the future we could
    /// either access each &str separately, or load them on demand.
    fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
        let lang = remapped_lang_name(lang);
        if let Some(module) = P::STRINGS.get(lang) {
            let mut text = String::new();
            for module_text in module.values() {
                text.push_str(module_text)
            }
            Some(text)
        } else {
            None
        }
    }

impl I18n {
    pub fn template_only() -> Self {
        Self::new::<&str>(&[])
    }
@@ -225,7 +275,7 @@ impl I18n {
        let mut output_langs = vec![];
        for lang in input_langs {
            // if the language is bundled in the binary
            if let Some(text) = ftl_localized_text(&lang).or_else(|| {
            if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
                // when testing, allow missing translations
                if cfg!(test) {
                    Some(String::new())

@@ -244,7 +294,7 @@ impl I18n {

        // add English templates
        let template_lang = "en-US".parse().unwrap();
        let template_text = ftl_localized_text(&template_lang).unwrap();
        let template_text = Self::ftl_localized_text(&template_lang).unwrap();
        let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
        bundles.push(template_bundle);
        output_langs.push(template_lang);
@@ -261,6 +311,7 @@ impl I18n {
                bundles,
                langs: output_langs,
            })),
            _translations_type: PhantomData,
        }
    }

@@ -270,7 +321,7 @@ impl I18n {
        message_index: usize,
        args: FluentArgs,
    ) -> String {
        let key = get_key(module_index, message_index);
        let key = Self::get_key(module_index, message_index);
        self.translate(key, Some(args)).into()
    }

@@ -305,7 +356,7 @@ impl I18n {
    /// implementation.
    pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
        let inner = self.inner.lock().unwrap();
        let resources = get_modules(&inner.langs, desired_modules);
        let resources = Self::get_modules(&inner.langs, desired_modules);
        ResourcesForJavascript {
            langs: inner.langs.iter().map(ToString::to_string).collect(),
            resources,
@@ -313,47 +364,6 @@ impl I18n {
    }
}

fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
    langs
        .iter()
        .cloned()
        .map(|lang| {
            let mut buf = String::new();
            let lang_name = remapped_lang_name(&lang);
            if let Some(strings) = STRINGS.get(lang_name) {
                if desired_modules.is_empty() {
                    // empty list, provide all modules
                    for value in strings.values() {
                        buf.push_str(value)
                    }
                } else {
                    for module_name in desired_modules {
                        if let Some(text) = strings.get(module_name.as_str()) {
                            buf.push_str(text);
                        }
                    }
                }
            }
            buf
        })
        .collect()
}

/// This temporarily behaves like the older code; in the future we could either
/// access each &str separately, or load them on demand.
fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
    let lang = remapped_lang_name(lang);
    if let Some(module) = STRINGS.get(lang) {
        let mut text = String::new();
        for module_text in module.values() {
            text.push_str(module_text)
        }
        Some(text)
    } else {
        None
    }
}

struct I18nInner {
    // bundles in preferred language order, with template English as the
    // last element
@@ -490,7 +500,7 @@ mod test {
    #[test]
    fn i18n() {
        // English template
        let tr = I18n::new(&["zz"]);
        let tr = I18n::<All>::new(&["zz"]);
        assert_eq!(tr.translate("valid-key", None), "a valid key");
        assert_eq!(tr.translate("invalid-key", None), "invalid-key");

@@ -513,7 +523,7 @@ mod test {
        );

        // Another language
        let tr = I18n::new(&["ja_JP"]);
        let tr = I18n::<All>::new(&["ja_JP"]);
        assert_eq!(tr.translate("valid-key", None), "キー");
        assert_eq!(tr.translate("only-in-english", None), "not translated");
        assert_eq!(tr.translate("invalid-key", None), "invalid-key");

@@ -524,7 +534,7 @@ mod test {
        );

        // Decimal separator
        let tr = I18n::new(&["pl-PL"]);
        let tr = I18n::<All>::new(&["pl-PL"]);
        // Polish will use a comma if the string is translated
        assert_eq!(
            tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),
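Editor's note: a hedged sketch of how the now-generic I18n is instantiated — the two forms below mirror the test code above and the launcher's `tr: I18n<anki_i18n::Launcher>` field:

    // Full string catalog (the default type parameter):
    let tr = I18n::<All>::new(&["en"]);
    // Launcher-only catalog, keeping the launcher binary's string table small:
    let tr: I18n<Launcher> = I18n::new(&["en"]);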
@@ -15,7 +15,7 @@ use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;

pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
    let mut buf = String::new();

    // lang->module map

@@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
    // ordered list of translations by module
    write_translation_key_index(modules, &mut buf);
    // methods to generate messages
    write_methods(modules, &mut buf);
    write_methods(modules, &mut buf, tag);

    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
    let path = dir.join("strings.rs");
    let path = dir.join(out_fn);
    fs::write(path, buf).unwrap();
}

fn write_methods(modules: &[Module], buf: &mut String) {
fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
    buf.push_str(
        r#"
use crate::{I18n,Number};
#[allow(unused_imports)]
use crate::{I18n,Number,Translations};
#[allow(unused_imports)]
use fluent::{FluentValue, FluentArgs};
use std::borrow::Cow;

impl I18n {
"#,
    );
    writeln!(buf, "impl I18n<{tag}> {{").unwrap();
    for module in modules {
        for translation in &module.translations {
            let func = translation.key.to_snake_case();

@@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {

    writeln!(
        buf,
        "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
        "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
        count = modules.len(),
    )
    .unwrap();

@@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
    buf.push_str(
        "
pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
    );

@@ -195,12 +197,30 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
    .unwrap();

    for (module, contents) in modules {
        writeln!(buf, r###"    "{module}" => r##"{contents}"##,"###).unwrap();
        let escaped_contents = escape_unicode_control_chars(contents);
        writeln!(
            buf,
            r###"    "{module}" => r##"{escaped_contents}"##,"###
        )
        .unwrap();
    }

    buf.push_str("};\n");
}

fn escape_unicode_control_chars(input: &str) -> String {
    use regex::Regex;

    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
    let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());

    re.replace_all(input, |caps: &regex::Captures| {
        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
        format!("\\u{{{:04x}}}", c as u32)
    })
    .into_owned()
}

fn lang_constant_name(lang: &str) -> String {
    lang.to_ascii_uppercase().replace('-', "_")
}
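Editor's note: a sketch of the escaping behaviour, inferred from the regex and format string above — bidirectional control characters such as U+202E would otherwise corrupt the generated raw-string literals:

    // U+202E (right-to-left override) becomes a source-level escape sequence:
    assert_eq!(
        escape_unicode_control_chars("ab\u{202e}cd"),
        "ab\\u{202e}cd"
    );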
@@ -42,14 +42,14 @@ enum CheckableUrl {
}

impl CheckableUrl {
    fn url(&self) -> Cow<str> {
    fn url(&self) -> Cow<'_, str> {
        match *self {
            Self::HelpPage(page) => help_page_to_link(page).into(),
            Self::String(s) => s.into(),
        }
    }

    fn anchor(&self) -> Cow<str> {
    fn anchor(&self) -> Cow<'_, str> {
        match *self {
            Self::HelpPage(page) => help_page_link_suffix(page).into(),
            Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
@@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()>
    write_header(&mut out)?;

    for service in services {
        if service.name == "BackendAnkidroidService" {
        if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) {
            continue;
        }
        for method in service.all_methods() {
@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
}

impl Backend {
    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
        let guard = self.col.lock().unwrap();
        guard
            .is_some()

@@ -102,7 +102,7 @@ impl Backend {
            .ok_or(AnkiError::CollectionNotOpen)
    }

    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
        let guard = self.col.lock().unwrap();
        guard
            .is_none()
@@ -34,7 +34,7 @@ pub fn prettify_av_tags<S: Into<String> + AsRef<str>>(txt: S) -> String {

/// Parse `txt` into [CardNodes] and return the result,
/// or [None] if it only contains text nodes.
fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
    let nodes = CardNodes::parse(txt);
    (!nodes.text_only).then_some(nodes)
}

@@ -103,13 +103,13 @@ fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
    move |s| alt((is_not(arr), success(""))).parse(s)
}

fn node(s: &str) -> IResult<Node> {
fn node(s: &str) -> IResult<'_, Node<'_>> {
    alt((sound_node, tag_node, text_node)).parse(s)
}

/// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
/// video file.
fn sound_node(s: &str) -> IResult<Node> {
fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
    map(
        delimited(tag("[sound:"), is_not("]"), tag("]")),
        Node::SoundOrVideo,

@@ -117,7 +117,7 @@ fn sound_node(s: &str) -> IResult<Node> {
    .parse(s)
}

fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
    // first char could be '[', but wasn't part of a node, so skip (eof ends parse)
    let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
    Ok(match after.find('[') {

@@ -127,9 +127,9 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
}

/// An Anki tag `[anki:tag...]...[/anki:tag]`.
fn tag_node(s: &str) -> IResult<Node> {
fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
    /// Match the start of an opening tag and return its name.
    fn name(s: &str) -> IResult<&str> {
    fn name(s: &str) -> IResult<'_, &str> {
        preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
    }

@@ -139,12 +139,12 @@ fn tag_node(s: &str) -> IResult<Node> {
    ) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
        /// List of whitespace-separated `key=val` tuples, where `val` may be
        /// empty.
        fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
            fn key(s: &str) -> IResult<&str> {
        fn options(s: &str) -> IResult<'_, Vec<(&str, &str)>> {
            fn key(s: &str) -> IResult<'_, &str> {
                is_not("] \t\r\n=").parse(s)
            }

            fn val(s: &str) -> IResult<&str> {
            fn val(s: &str) -> IResult<'_, &str> {
                alt((
                    delimited(tag("\""), is_not0("\""), tag("\"")),
                    is_not0("] \t\r\n\""),

@@ -197,7 +197,7 @@ fn tag_node(s: &str) -> IResult<Node> {
    .parse(s)
}

fn text_node(s: &str) -> IResult<Node> {
fn text_node(s: &str) -> IResult<'_, Node<'_>> {
    map(take_till_potential_tag_start, Node::Text).parse(s)
}

@@ -10,6 +10,7 @@ use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;

@@ -26,7 +27,7 @@ use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;

static CLOZE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
    LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());

static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(

@@ -48,39 +49,42 @@ mod mathjax_caps {
#[derive(Debug)]
enum Token<'a> {
    // The parameter is the cloze number as it appears in the field content.
    OpenCloze(u16),
    OpenCloze(Vec<u16>),
    Text(&'a str),
    CloseCloze,
}

/// Tokenize string
fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
    fn open_cloze(text: &str) -> IResult<&str, Token> {
fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
    fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
        // opening brackets and 'c'
        let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
        // following number
        let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
        let digits: u16 = match digits.parse() {
            Ok(digits) => digits,
            Err(_) => {
                // not a valid number; fail to recognize
                return Err(nom::Err::Error(nom::error::make_error(
                    text,
                    nom::error::ErrorKind::Digit,
                )));
            }
        };
        // following comma-separated numbers
        let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
        let ordinals: Vec<u16> = ordinals
            .split(',')
            .filter_map(|s| s.parse().ok())
            .collect::<HashSet<_>>() // deduplicate
            .into_iter()
            .sorted() // set conversion can de-order
            .collect();
        if ordinals.is_empty() {
            return Err(nom::Err::Error(nom::error::make_error(
                text,
                nom::error::ErrorKind::Digit,
            )));
        }
        // ::
        let (text, _colons) = tag("::")(text)?;
        Ok((text, Token::OpenCloze(digits)))
        Ok((text, Token::OpenCloze(ordinals)))
    }

    fn close_cloze(text: &str) -> IResult<&str, Token> {
    fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
        map(tag("}}"), |_| Token::CloseCloze).parse(text)
    }

    /// Match a run of text until an open/close marker is encountered.
    fn normal_text(text: &str) -> IResult<&str, Token> {
    fn normal_text(text: &str) -> IResult<&str, Token<'_>> {
        if text.is_empty() {
            return Err(nom::Err::Error(nom::error::make_error(
                text,
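Editor's note: a hedged restatement of the new ordinal handling — comma-separated cloze numbers are parsed, deduplicated, and sorted, so `{{c2,1,2::...}}` opens the same cloze as `{{c1,2::...}}`. The helper below is hypothetical; the real logic lives inline in open_cloze() above:

    use std::collections::HashSet;
    use itertools::Itertools;

    // Hypothetical standalone helper, for illustration only.
    fn parse_ordinals(s: &str) -> Vec<u16> {
        s.split(',')
            .filter_map(|part| part.parse().ok())
            .collect::<HashSet<_>>() // deduplicate
            .into_iter()
            .sorted() // set iteration order is arbitrary
            .collect()
    }

    fn main() {
        assert_eq!(parse_ordinals("2,1,2"), vec![1, 2]);
        assert_eq!(parse_ordinals("1,,3"), vec![1, 3]);
    }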
@@ -121,18 +125,27 @@ enum TextOrCloze<'a> {
#[derive(Debug)]
struct ExtractedCloze<'a> {
    // `ordinal` is the cloze number as it appears in the field content.
    ordinal: u16,
    ordinals: Vec<u16>,
    nodes: Vec<TextOrCloze<'a>>,
    hint: Option<&'a str>,
}

/// Generate a string representation of the ordinals for HTML
fn ordinals_str(ordinals: &[u16]) -> String {
    ordinals
        .iter()
        .map(|o| o.to_string())
        .collect::<Vec<_>>()
        .join(",")
}

impl ExtractedCloze<'_> {
    /// Return the cloze's hint, or "..." if none was provided.
    fn hint(&self) -> &str {
        self.hint.unwrap_or("...")
    }

    fn clozed_text(&self) -> Cow<str> {
    fn clozed_text(&self) -> Cow<'_, str> {
        // happy efficient path?
        if self.nodes.len() == 1 {
            if let TextOrCloze::Text(text) = self.nodes.last().unwrap() {
@@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
        buf.into()
    }

    /// Checks if this cloze is active for a given ordinal
    fn contains_ordinal(&self, ordinal: u16) -> bool {
        self.ordinals.contains(&ordinal)
    }

    /// If cloze starts with image-occlusion:, return the text following that.
    fn image_occlusion(&self) -> Option<&str> {
        let TextOrCloze::Text(text) = self.nodes.first()? else {
@@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec<TextOrCloze<'_>> {
    let mut output = vec![];
    for token in tokenize(text) {
        match token {
            Token::OpenCloze(ordinal) => {
            Token::OpenCloze(ordinals) => {
                if open_clozes.len() < 10 {
                    open_clozes.push(ExtractedCloze {
                        ordinal,
                        ordinals,
                        nodes: Vec::with_capacity(1), // common case
                        hint: None,
                    })
@@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
    output: &mut Vec<String>,
) {
    if let TextOrCloze::Cloze(cloze) = node {
        if cloze.ordinal == cloze_ord {
        if cloze.contains_ordinal(cloze_ord) {
            if question {
                output.push(cloze.hint().into())
            } else {
@@ -234,14 +252,15 @@ fn reveal_cloze(
    active_cloze_found_in_text: &mut bool,
    buf: &mut String,
) {
    let active = cloze.ordinal == cloze_ord;
    let active = cloze.contains_ordinal(cloze_ord);
    *active_cloze_found_in_text |= active;

    if let Some(image_occlusion_text) = cloze.image_occlusion() {
        buf.push_str(&render_image_occlusion(
            image_occlusion_text,
            question,
            active,
            cloze.ordinal,
            &cloze.ordinals,
        ));
        return;
    }
@@ -265,7 +284,7 @@ fn reveal_cloze(
            buf,
            r#"<span class="cloze" data-cloze="{}" data-ordinal="{}">[{}]</span>"#,
            encode_attribute(&content_buf),
            cloze.ordinal,
            ordinals_str(&cloze.ordinals),
            cloze.hint()
        )
        .unwrap();
@@ -274,7 +293,7 @@ fn reveal_cloze(
        write!(
            buf,
            r#"<span class="cloze" data-ordinal="{}">"#,
            cloze.ordinal
            ordinals_str(&cloze.ordinals)
        )
        .unwrap();
        for node in &cloze.nodes {
@@ -292,7 +311,7 @@ fn reveal_cloze(
        write!(
            buf,
            r#"<span class="cloze-inactive" data-ordinal="{}">"#,
            cloze.ordinal
            ordinals_str(&cloze.ordinals)
        )
        .unwrap();
        for node in &cloze.nodes {
@@ -308,23 +327,28 @@ fn reveal_cloze(
    }
}

fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
    if (question_side && active) || ordinal == 0 {
fn render_image_occlusion(
    text: &str,
    question_side: bool,
    active: bool,
    ordinals: &[u16],
) -> String {
    if (question_side && active) || ordinals.contains(&0) {
        format!(
            r#"<div class="cloze" data-ordinal="{}" {}></div>"#,
            ordinal,
            ordinals_str(ordinals),
            &get_image_cloze_data(text)
        )
    } else if !active {
        format!(
            r#"<div class="cloze-inactive" data-ordinal="{}" {}></div>"#,
            ordinal,
            ordinals_str(ordinals),
            &get_image_cloze_data(text)
        )
    } else if !question_side && active {
        format!(
            r#"<div class="cloze-highlight" data-ordinal="{}" {}></div>"#,
            ordinal,
            ordinals_str(ordinals),
            &get_image_cloze_data(text)
        )
    } else {
@@ -338,7 +362,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
        if let TextOrCloze::Cloze(cloze) = node {
            if cloze.image_occlusion().is_some() {
                if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
                    occlusions.entry(cloze.ordinal).or_default().push(shape);
                    // Associate this occlusion with all ordinals in this cloze
                    for &ordinal in &cloze.ordinals {
                        occlusions.entry(ordinal).or_default().push(shape.clone());
                    }
                }
            }
        }
@@ -353,7 +380,7 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
        .collect()
}

pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
    let mut buf = String::new();
    let mut active_cloze_found_in_text = false;
    for node in &parse_text_with_clozes(text) {
@@ -376,7 +403,7 @@ pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str>
    }
}

pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
    let mut output = Vec::new();
    for node in &parse_text_with_clozes(text) {
        reveal_cloze_text_in_nodes(node, cloze_ord, question, &mut output);
@@ -384,7 +411,7 @@ pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow
    output.join(", ").into()
}

pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<str> {
pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<'_, str> {
    let mut output = Vec::new();
    for node in &parse_text_with_clozes(text) {
        reveal_cloze_text_in_nodes(node, cloze_ord, false, &mut output);
@@ -420,7 +447,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
pub(crate) fn contains_cloze(text: &str) -> bool {
    parse_text_with_clozes(text)
        .iter()
        .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
        .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
}

/// Returns the set of cloze numbers as they appear in the field's content.
@@ -433,10 +460,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet<u16> {
fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet<u16>) {
    for node in nodes {
        if let TextOrCloze::Cloze(cloze) = node {
            if cloze.ordinal != 0 {
                set.insert(cloze.ordinal);
                add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
            for &ordinal in &cloze.ordinals {
                if ordinal != 0 {
                    set.insert(ordinal);
                }
            }
            add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
        }
    }
}
@@ -460,7 +489,7 @@ pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
    CLOZE.replace_all(text, "$1")
}

fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
fn strip_html_inside_mathjax(text: &str) -> Cow<'_, str> {
    MATHJAX.replace_all(text, |caps: &Captures| -> String {
        format!(
            "{}{}{}",
@ -654,4 +683,160 @@ mod test {
|
|||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_card_generation() {
|
||||
let text = "{{c1,2,3::multi}}";
|
||||
assert_eq!(
|
||||
cloze_number_in_fields(vec![text]),
|
||||
vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_cloze_basic() {
|
||||
let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
|
||||
"[...] word and [...] vs second"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
|
||||
"[...] word and first vs [...]"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
|
||||
"shared word and first vs second"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
|
||||
"shared word and first vs second"
|
||||
);
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string(text),
|
||||
vec![1, 2].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_cloze_html_attributes() {
|
||||
let text = "{{c1,2,3::multi}}";
|
||||
|
||||
let card1_html = reveal_cloze_text(text, 1, true);
|
||||
assert!(card1_html.contains(r#"data-ordinal="1,2,3""#));
|
||||
|
||||
let card2_html = reveal_cloze_text(text, 2, true);
|
||||
assert!(card2_html.contains(r#"data-ordinal="1,2,3""#));
|
||||
|
||||
let card3_html = reveal_cloze_text(text, 3, true);
|
||||
assert!(card3_html.contains(r#"data-ordinal="1,2,3""#));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_cloze_with_hints() {
|
||||
let text = "{{c1,2::answer::hint}}";
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
|
||||
"[hint]"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
|
||||
"[hint]"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
|
||||
"answer"
|
||||
);
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
|
||||
"answer"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_cloze_edge_cases() {
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string("{{c1,1,2::test}}"),
|
||||
vec![1, 2].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string("{{c0,1,2::test}}"),
|
||||
vec![1, 2].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string("{{c1,,3::test}}"),
|
||||
vec![1, 3].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_cloze_only_filter() {
|
||||
let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
|
||||
|
||||
assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ...");
|
||||
assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ...");
|
||||
assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first");
|
||||
assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_nested_cloze() {
|
||||
let text = "{{c1,2::outer {{c3::inner}}}}";
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
|
||||
"[...]"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
|
||||
"[...]"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 3, true)).as_ref(),
|
||||
"outer [...]"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string(text),
|
||||
vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_parent_child_card_same_cloze() {
|
||||
let text = "{{c1::outer {{c1::inner}}}}";
|
||||
|
||||
assert_eq!(
|
||||
strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
|
||||
"[...]"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cloze_numbers_in_string(text),
|
||||
vec![1].into_iter().collect::<HashSet<u16>>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multi_card_image_occlusion() {
|
||||
let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
|
||||
|
||||
let occlusions = parse_image_occlusions(text);
|
||||
assert_eq!(occlusions.len(), 2);
|
||||
assert!(occlusions.iter().any(|o| o.ordinal == 1));
|
||||
assert!(occlusions.iter().any(|o| o.ordinal == 2));
|
||||
|
||||
let card1_html = reveal_cloze_text(text, 1, true);
|
||||
assert!(card1_html.contains(r#"data-ordinal="1,2""#));
|
||||
|
||||
let card2_html = reveal_cloze_text(text, 2, true);
|
||||
assert!(card2_html.contains(r#"data-ordinal="1,2""#));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
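
The tests above pin down the multi-card ordinal syntax: {{c1,2,3::text}} ties one deletion to several cards, a zero ordinal is ignored, empty entries between commas are skipped, and duplicates collapse because the result is a set. A minimal standalone sketch of that parsing rule (the helper name is invented for illustration and is not part of the diff):

    use std::collections::HashSet;

    // Mirrors the tested behaviour: "1,1,2" -> {1, 2}, "0,1,2" -> {1, 2}
    // (zero is not a valid card ordinal), and "1,,3" -> {1, 3}.
    fn parse_ordinals(list: &str) -> HashSet<u16> {
        list.split(',')
            .filter_map(|s| s.trim().parse::<u16>().ok())
            .filter(|&n| n > 0)
            .collect()
    }

    fn main() {
        assert_eq!(parse_ordinals("1,1,2"), HashSet::from([1, 2]));
        assert_eq!(parse_ordinals("0,1,2"), HashSet::from([1, 2]));
        assert_eq!(parse_ordinals("1,,3"), HashSet::from([1, 3]));
    }
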

@@ -1,8 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use anki_proto::collection::GetCustomColoursResponse;
 use anki_proto::generic;
 
 use crate::collection::Collection;
+use crate::config::ConfigKey;
 use crate::error;
 use crate::prelude::BoolKey;
 use crate::prelude::Op;
@@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
         })
         .map(Into::into)
     }
+
+    fn get_custom_colours(
+        &mut self,
+    ) -> error::Result<anki_proto::collection::GetCustomColoursResponse> {
+        let colours = self
+            .get_config_optional(ConfigKey::CustomColorPickerPalette)
+            .unwrap_or_default();
+        Ok(GetCustomColoursResponse { colours })
+    }
 }

@@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
     NextNewCardPosition,
     #[strum(to_string = "schedVer")]
     SchedulerVersion,
+    CustomColorPickerPalette,
 }
 
 #[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]

@@ -115,7 +115,7 @@ impl crate::services::DeckConfigService for Collection {
             .storage
             .get_revlog_entries_for_searched_cards_in_card_order()?;
 
-        let config = guard.col.get_optimal_retention_parameters(revlogs)?;
+        let mut config = guard.col.get_optimal_retention_parameters(revlogs)?;
         let cards = guard
             .col
             .storage
@@ -125,6 +125,8 @@ impl crate::services::DeckConfigService for Collection {
             .filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
             .collect::<Vec<fsrs::Card>>();
 
+        config.deck_size = guard.cards;
+
         let costs = (70u32..=99u32)
             .into_par_iter()
             .map(|dr| {

@@ -216,9 +216,6 @@ impl Collection {
         for deck in self.storage.get_all_decks()? {
             if let Ok(normal) = deck.normal() {
                 let deck_id = deck.id;
-                if let Some(desired_retention) = normal.desired_retention {
-                    deck_desired_retention.insert(deck_id, desired_retention);
-                }
                 // previous order & params
                 let previous_config_id = DeckConfigId(normal.config_id);
                 let previous_config = configs_before_update.get(&previous_config_id);
@@ -226,21 +223,23 @@ impl Collection {
                     .map(|c| c.inner.new_card_insert_order())
                     .unwrap_or_default();
                 let previous_params = previous_config.map(|c| c.fsrs_params());
-                let previous_retention = previous_config.map(|c| c.inner.desired_retention);
+                let previous_preset_dr = previous_config.map(|c| c.inner.desired_retention);
+                let previous_deck_dr = normal.desired_retention;
+                let previous_dr = previous_deck_dr.or(previous_preset_dr);
                 let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
 
                 // if a selected (sub)deck, or its old config was removed, update deck to point
                 // to new config
-                let current_config_id = if selected_deck_ids.contains(&deck.id)
+                let (current_config_id, current_deck_dr) = if selected_deck_ids.contains(&deck.id)
                     || !configs_after_update.contains_key(&previous_config_id)
                 {
                     let mut updated = deck.clone();
                     updated.normal_mut()?.config_id = selected_config.id.0;
                     update_deck_limits(updated.normal_mut()?, &req.limits, today);
                     self.update_deck_inner(&mut updated, deck, usn)?;
-                    selected_config.id
+                    (selected_config.id, updated.normal()?.desired_retention)
                 } else {
-                    previous_config_id
+                    (previous_config_id, previous_deck_dr)
                 };
 
                 // if new order differs, deck needs re-sorting
@@ -254,11 +253,12 @@ impl Collection {
 
                 // if params differ, memory state needs to be recomputed
                 let current_params = current_config.map(|c| c.fsrs_params());
-                let current_retention = current_config.map(|c| c.inner.desired_retention);
+                let current_preset_dr = current_config.map(|c| c.inner.desired_retention);
+                let current_dr = current_deck_dr.or(current_preset_dr);
                 let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
                 if fsrs_toggled
                     || previous_params != current_params
-                    || previous_retention != current_retention
+                    || previous_dr != current_dr
                     || (req.fsrs_reschedule && previous_easy_days != current_easy_days)
                 {
                     decks_needing_memory_recompute
@@ -266,7 +266,9 @@ impl Collection {
                         .or_default()
                         .push(deck_id);
                 }
-
+                if let Some(desired_retention) = current_deck_dr {
+                    deck_desired_retention.insert(deck_id, desired_retention);
+                }
                 self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
             }
         }
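
These hunks thread a per-deck desired retention through the config update: a deck-level value, when present, overrides the preset's, and the fallback is exactly what Option::or expresses. A toy illustration of the precedence (the values are invented):

    fn main() {
        let deck_dr: Option<f32> = Some(0.85); // deck-level override, if any
        let preset_dr: Option<f32> = Some(0.90); // value from the config preset
        // Deck-level DR wins; otherwise fall back to the preset's DR.
        assert_eq!(deck_dr.or(preset_dr), Some(0.85));
        assert_eq!(None.or(preset_dr), Some(0.90));
    }

Because the effective value is now this combination, the memory-state recompute trigger compares previous_dr with current_dr rather than the preset values alone.
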

@@ -191,7 +191,7 @@ fn invalid_char_for_deck_component(c: char) -> bool {
     c.is_ascii_control()
 }
 
-fn normalized_deck_name_component(comp: &str) -> Cow<str> {
+fn normalized_deck_name_component(comp: &str) -> Cow<'_, str> {
     let mut out = normalize_to_nfc(comp);
     if out.contains(invalid_char_for_deck_component) {
         out = out.replace(invalid_char_for_deck_component, "").into();

@@ -135,6 +135,8 @@ pub struct NormalDeckSchema11 {
     review_limit_today: Option<DayLimit>,
     #[serde(default, deserialize_with = "default_on_invalid")]
     new_limit_today: Option<DayLimit>,
+    #[serde(default, deserialize_with = "default_on_invalid")]
+    desired_retention: Option<u32>,
 }
 
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
@@ -249,6 +251,7 @@ impl Default for NormalDeckSchema11 {
             new_limit: None,
             review_limit_today: None,
             new_limit_today: None,
+            desired_retention: None,
         }
     }
 }
@@ -325,7 +328,7 @@ impl From<NormalDeckSchema11> for NormalDeck {
             new_limit: deck.new_limit,
             review_limit_today: deck.review_limit_today,
             new_limit_today: deck.new_limit_today,
-            desired_retention: None,
+            desired_retention: deck.desired_retention.map(|v| v as f32 / 100.0),
         }
     }
 }
@@ -367,6 +370,7 @@ impl From<Deck> for DeckSchema11 {
                 new_limit: norm.new_limit,
                 review_limit_today: norm.review_limit_today,
                 new_limit_today: norm.new_limit_today,
+                desired_retention: norm.desired_retention.map(|v| (v * 100.0) as u32),
                 common: deck.into(),
             }),
             DeckKind::Filtered(ref filt) => DeckSchema11::Filtered(FilteredDeckSchema11 {
@@ -431,7 +435,8 @@ static RESERVED_DECK_KEYS: Set<&'static str> = phf_set! {
     "browserCollapsed",
     "extendRev",
     "id",
-    "collapsed"
+    "collapsed",
+    "desiredRetention",
 };
 
 impl From<&Deck> for DeckTodaySchema11 {
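
The schema-11 representation stores desired retention as an integer percentage, while the internal NormalDeck uses a fractional f32, hence the paired `/ 100.0` and `* 100.0` conversions above (and the new "desiredRetention" reserved key). A quick round-trip check of that convention, for illustration:

    fn main() {
        let stored: u32 = 90; // schema11: percent
        let internal = stored as f32 / 100.0; // internal: fraction
        assert_eq!(internal, 0.9);
        assert_eq!((internal * 100.0) as u32, stored); // back to schema11
    }
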

@@ -231,7 +231,10 @@ fn svg_getter(notetypes: &[Notetype]) -> impl Fn(NotetypeId) -> bool {
 }
 
 impl Collection {
-    fn gather_notes(&mut self, search: impl TryIntoSearch) -> Result<(Vec<Note>, NoteTableGuard)> {
+    fn gather_notes(
+        &mut self,
+        search: impl TryIntoSearch,
+    ) -> Result<(Vec<Note>, NoteTableGuard<'_>)> {
         let guard = self.search_notes_into_table(search)?;
         guard
             .col
@@ -240,7 +243,7 @@ impl Collection {
             .map(|notes| (notes, guard))
     }
 
-    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard)> {
+    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard<'_>)> {
         let guard = self.search_cards_of_notes_into_table()?;
         guard
             .col

@@ -664,7 +664,7 @@ mod test {
             self
         }
 
-        fn import(self, col: &mut Collection) -> NoteContext {
+        fn import(self, col: &mut Collection) -> NoteContext<'_> {
            let mut progress_handler = col.new_progress_handler();
            let media_map = Box::leak(Box::new(self.media_map));
            let mut ctx = NoteContext::new(

@@ -154,7 +154,7 @@ pub(super) fn extract_media_entries(
         }
     }
 }
 
-pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<str>> {
+pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<'_, str>> {
     if !filename_is_safe(name) {
         Err(AnkiError::ImportError {
             source: ImportError::Corrupt,

@@ -147,7 +147,7 @@ fn rendered_nodes_to_str(nodes: &[RenderedNode]) -> String {
         .join("")
 }
 
-fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
+fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
     let mut text = strip_redundant_sections(field);
     if !with_html {
         text = text.map_cow(|t| html_to_text_line(t, false));
@@ -155,7 +155,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
     text
 }
 
-fn strip_redundant_sections(text: &str) -> Cow<str> {
+fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> = LazyLock::new(|| {
         Regex::new(
             r"(?isx)
@@ -169,7 +169,7 @@ fn strip_redundant_sections(text: &str) -> Cow<str> {
     RE.replace_all(text.as_ref(), "")
 }
 
-fn strip_answer_side_question(text: &str) -> Cow<str> {
+fn strip_answer_side_question(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> =
         LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
     RE.replace_all(text.as_ref(), "")

@@ -251,7 +251,7 @@ impl NoteContext {
             .chain(self.tags(note))
     }
 
-    fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn notetype_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_notetype.then(|| {
             self.notetypes
                 .get(&note.notetype_id)
@@ -259,7 +259,7 @@ impl NoteContext {
         })
     }
 
-    fn deck_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn deck_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_deck.then(|| {
             self.deck_ids
                 .get(&note.id)
@@ -268,7 +268,7 @@ impl NoteContext {
         })
     }
 
-    fn tags(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn tags(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_tags
             .then(|| Cow::from(note.tags.join(" ").into_bytes()))
     }

@@ -511,7 +511,7 @@ impl NoteContext<'_> {
 }
 
 impl Note {
-    fn first_field_stripped(&self) -> Cow<str> {
+    fn first_field_stripped(&self) -> Cow<'_, str> {
         strip_html_preserving_media_filenames(&self.fields()[0])
     }
 }
@@ -623,7 +623,7 @@ impl ForeignNote {
             .all(|(opt, field)| opt.as_ref().map(|s| s == field).unwrap_or(true))
     }
 
-    fn first_field_stripped(&self) -> Option<Cow<str>> {
+    fn first_field_stripped(&self) -> Option<Cow<'_, str>> {
         self.fields
             .first()
             .and_then(|s| s.as_ref())

@@ -48,7 +48,7 @@ pub struct ExtractedLatex {
 pub(crate) fn extract_latex_expanding_clozes(
     text: &str,
     svg: bool,
-) -> (Cow<str>, Vec<ExtractedLatex>) {
+) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     if text.contains("{{c") {
         let expanded = expand_clozes_to_reveal_latex(text);
         let (text, extracts) = extract_latex(&expanded, svg);
@@ -60,7 +60,7 @@ pub(crate) fn extract_latex_expanding_clozes(
 
 /// Extract LaTeX from the provided text.
 /// Expects cloze deletions to already be expanded.
-pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
+pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     let mut extracted = vec![];
 
     let new_text = LATEX.replace_all(text, |caps: &Captures| {
@@ -84,7 +84,7 @@ pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
     (new_text, extracted)
 }
 
-fn strip_html_for_latex(html: &str) -> Cow<str> {
+fn strip_html_for_latex(html: &str) -> Cow<'_, str> {
     let mut out: Cow<str> = html.into();
     if let Cow::Owned(o) = LATEX_NEWLINES.replace_all(html, "\n") {
         out = o.into();

@@ -91,7 +91,7 @@ fn nonbreaking_space(char: char) -> bool {
 /// - Any problem characters are removed.
 /// - Windows device names like CON and PRN have '_' appended
 /// - The filename is limited to 120 bytes.
-pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
+pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
     let mut output = Cow::Borrowed(fname);
 
     if !is_nfc(output.as_ref()) {
@@ -102,7 +102,7 @@ pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
 }
 
 /// See normalize_filename(). This function expects NFC-normalized input.
-pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
+pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
     if fname.contains(disallowed_char) {
         fname = fname.replace(disallowed_char, "").into()
     }
@@ -137,7 +137,7 @@ pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
 /// but can be accessed as NFC. On these devices, if the filename
 /// is otherwise valid, the filename is returned as NFC.
 #[allow(clippy::collapsible_else_if)]
-pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<str>> {
+pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<'_, str>> {
     if cfg!(target_vendor = "apple") {
         if !is_nfc(fname) {
             let as_nfc = fname.chars().nfc().collect::<String>();
@@ -208,7 +208,7 @@ pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> Stri
 }
 
 /// If filename is longer than max_bytes, truncate it.
-fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
+fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<'_, str> {
     if fname.len() <= max_bytes {
         return Cow::Borrowed(fname);
     }

@@ -87,7 +87,7 @@ impl TryFrom<anki_proto::notes::AddNoteRequest> for AddNoteRequest {
 }
 
 impl Collection {
-    pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<()>> {
+    pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<usize>> {
         self.transact(Op::AddNote, |col| col.add_note_inner(note, did))
     }
 
@@ -372,7 +372,7 @@ impl Collection {
         Ok(())
     }
 
-    pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<()> {
+    pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<usize> {
         let nt = self
             .get_notetype(note.notetype_id)?
             .or_invalid("missing note type")?;
@@ -383,10 +383,11 @@ impl Collection {
         note.prepare_for_update(ctx.notetype, normalize_text)?;
         note.set_modified(ctx.usn);
         self.add_note_only_undoable(note)?;
-        self.generate_cards_for_new_note(&ctx, note, did)?;
+        let count = self.generate_cards_for_new_note(&ctx, note, did)?;
         self.set_last_deck_for_notetype(note.notetype_id, did)?;
         self.set_last_notetype_for_deck(did, note.notetype_id)?;
-        self.set_current_notetype_id(note.notetype_id)
+        self.set_current_notetype_id(note.notetype_id)?;
+        Ok(count)
     }
 
     pub fn update_note(&mut self, note: &mut Note) -> Result<OpOutput<()>> {

@@ -215,7 +215,7 @@ impl Collection {
         ctx: &CardGenContext<impl Deref<Target = Notetype>>,
         note: &Note,
         target_deck_id: DeckId,
-    ) -> Result<()> {
+    ) -> Result<usize> {
         self.generate_cards_for_note(
             ctx,
             note,
@@ -231,7 +231,8 @@ impl Collection {
         note: &Note,
     ) -> Result<()> {
         let existing = self.storage.existing_cards_for_note(note.id)?;
-        self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())
+        self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())?;
+        Ok(())
     }
 
     fn generate_cards_for_note(
@@ -241,12 +242,13 @@ impl Collection {
         existing: &[AlreadyGeneratedCardInfo],
         target_deck_id: Option<DeckId>,
         cache: &mut CardGenCache,
-    ) -> Result<()> {
+    ) -> Result<usize> {
         let cards = ctx.new_cards_required(note, existing, true);
         if cards.is_empty() {
-            return Ok(());
+            return Ok(0);
        }
-        self.add_generated_cards(note.id, &cards, target_deck_id, cache)
+        self.add_generated_cards(note.id, &cards, target_deck_id, cache)?;
+        Ok(cards.len())
     }
 
     pub(crate) fn generate_cards_for_notetype(

@@ -25,7 +25,7 @@ pub struct RenderCardOutput {
 
 impl RenderCardOutput {
     /// The question text. This is only valid to call when partial_render=false.
-    pub fn question(&self) -> Cow<str> {
+    pub fn question(&self) -> Cow<'_, str> {
         match self.qnodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),
@@ -33,7 +33,7 @@ impl RenderCardOutput {
     }
 
     /// The answer text. This is only valid to call when partial_render=false.
-    pub fn answer(&self) -> Cow<str> {
+    pub fn answer(&self) -> Cow<'_, str> {
         match self.anodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),

@@ -122,7 +122,7 @@ pub(crate) fn basic(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
     let mut nt = basic(tr);
-    nt.config.original_stock_kind = StockKind::BasicTyping as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicTyping as i32;
     nt.name = tr.notetypes_basic_type_answer_name().into();
     let front = tr.notetypes_front_field();
     let back = tr.notetypes_back_field();
@@ -138,7 +138,7 @@ pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
     let mut nt = basic(tr);
-    nt.config.original_stock_kind = StockKind::BasicAndReversed as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicAndReversed as i32;
     nt.name = tr.notetypes_basic_reversed_name().into();
     let front = tr.notetypes_front_field();
     let back = tr.notetypes_back_field();
@@ -156,7 +156,7 @@ pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_optional_reverse(tr: &I18n) -> Notetype {
     let mut nt = basic_forward_reverse(tr);
-    nt.config.original_stock_kind = StockKind::BasicOptionalReversed as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicOptionalReversed as i32;
     nt.name = tr.notetypes_basic_optional_reversed_name().into();
     let addrev = tr.notetypes_add_reverse_field();
     nt.add_field(addrev.as_ref());

@@ -85,6 +85,15 @@ impl RevlogEntry {
             .unwrap()
     }
 
+    pub(crate) fn last_interval_secs(&self) -> u32 {
+        u32::try_from(if self.last_interval > 0 {
+            self.last_interval.saturating_mul(86_400)
+        } else {
+            self.last_interval.saturating_mul(-1)
+        })
+        .unwrap()
+    }
+
     /// Returns true if this entry represents a reset operation.
     /// These entries are created when a card is reset using
     /// [`Collection::reschedule_cards_as_new`].
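
last_interval_secs relies on the revlog convention that a positive last_interval counts days while a negative one counts seconds (used for intraday learning steps); both branches normalise to seconds. A standalone rendering of the same logic, with example values:

    fn last_interval_secs(last_interval: i32) -> u32 {
        u32::try_from(if last_interval > 0 {
            last_interval.saturating_mul(86_400) // days -> seconds
        } else {
            last_interval.saturating_mul(-1) // negative seconds -> seconds
        })
        .unwrap()
    }

    fn main() {
        assert_eq!(last_interval_secs(3), 259_200); // 3 days
        assert_eq!(last_interval_secs(-600), 600); // a 10-minute learning step
    }
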

@@ -443,9 +443,20 @@ impl Collection {
             .storage
             .get_deck(card.deck_id)?
             .or_not_found(card.deck_id)?;
-        let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
+        let home_deck = if card.original_deck_id.0 == 0 {
+            &deck
+        } else {
+            &self
+                .storage
+                .get_deck(card.original_deck_id)?
+                .or_not_found(card.original_deck_id)?
+        };
+        let config = self
+            .storage
+            .get_deck_config(home_deck.config_id().or_invalid("home deck is filtered")?)?
+            .unwrap_or_default();
 
-        let desired_retention = deck.effective_desired_retention(&config);
+        let desired_retention = home_deck.effective_desired_retention(&config);
         let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
         let fsrs_next_states = if fsrs_enabled {
             let params = config.fsrs_params();
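
For a card in a filtered deck, original_deck_id is non-zero and points back at the home deck, so desired retention is now read from the home deck rather than the filtered one; zero is the "not in a filtered deck" sentinel. A reduced sketch of that selection (types simplified for illustration):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct DeckId(i64);

    // 0 means the card currently sits in its home deck.
    fn home_deck_id(deck_id: DeckId, original_deck_id: DeckId) -> DeckId {
        if original_deck_id.0 == 0 { deck_id } else { original_deck_id }
    }

    fn main() {
        assert_eq!(home_deck_id(DeckId(5), DeckId(0)), DeckId(5)); // normal deck
        assert_eq!(home_deck_id(DeckId(9), DeckId(5)), DeckId(5)); // filtered deck
    }
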

@@ -13,13 +13,7 @@ impl From<FSRSError> for AnkiError {
             FSRSError::OptimalNotFound => AnkiError::FsrsUnableToDetermineDesiredRetention,
             FSRSError::Interrupted => AnkiError::Interrupted,
             FSRSError::InvalidParameters => AnkiError::FsrsParamsInvalid,
-            FSRSError::InvalidInput => AnkiError::InvalidInput {
-                source: InvalidInputError {
-                    message: "invalid params provided".to_string(),
-                    source: None,
-                    backtrace: None,
-                },
-            },
+            FSRSError::InvalidInput => AnkiError::FsrsParamsInvalid,
             FSRSError::InvalidDeckSize => AnkiError::InvalidInput {
                 source: InvalidInputError {
                     message: "no cards to simulate".to_string(),

@@ -136,6 +136,19 @@ impl Collection {
                     let deckconfig_id = deck.config_id().unwrap();
                     // reschedule it
                     let original_interval = card.interval;
+                    let min_interval = |interval: u32| {
+                        let previous_interval =
+                            last_info.previous_interval.unwrap_or(0);
+                        if interval > previous_interval {
+                            // interval grew; don't allow fuzzed interval to
+                            // be less than previous+1
+                            previous_interval + 1
+                        } else {
+                            // interval shrunk; don't restrict negative fuzz
+                            0
+                        }
+                        .max(1)
+                    };
                     let interval = fsrs.next_interval(
                         Some(state.stability),
                         desired_retention,
@@ -146,7 +159,7 @@ impl Collection {
                         .and_then(|r| {
                             r.find_interval(
                                 interval,
-                                1,
+                                min_interval(interval as u32),
                                 req.max_interval,
                                 days_elapsed as u32,
                                 deckconfig_id,
@@ -157,7 +170,7 @@ impl Collection {
                             with_review_fuzz(
                                 card.get_fuzz_factor(true),
                                 interval,
-                                1,
+                                min_interval(interval as u32),
                                 req.max_interval,
                             )
                         });
@@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo {
     /// reviewed the card and now, so that we can determine an accurate period
     /// when the card has subsequently been rescheduled to a different day.
     pub(crate) last_reviewed_at: Option<TimestampSecs>,
+    /// The interval before the latest review. Used to prevent fuzz from going
+    /// backwards when rescheduling the card
+    pub(crate) previous_interval: Option<u32>,
 }
 
 /// Return a map of cards to info about last review.
@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, L
         .into_iter()
         .for_each(|(card_id, group)| {
             let mut last_reviewed_at = None;
+            let mut previous_interval = None;
             for e in group.into_iter() {
                 if e.has_rating_and_affects_scheduling() {
                     last_reviewed_at = Some(e.id.as_secs());
+                    previous_interval = if e.last_interval >= 0 && e.button_chosen > 1 {
+                        Some(e.last_interval as u32)
+                    } else {
+                        None
+                    };
+                } else if e.is_reset() {
+                    last_reviewed_at = None;
+                    previous_interval = None;
                 }
             }
-            out.insert(card_id, LastRevlogInfo { last_reviewed_at });
+            out.insert(
+                card_id,
+                LastRevlogInfo {
+                    last_reviewed_at,
+                    previous_interval,
+                },
+            );
         });
     out
 }
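
The new min_interval closure keeps fuzz from moving a card backwards: when rescheduling grows the interval, the fuzzed result may not drop below the previous interval plus one; when it shrinks, only the usual one-day floor applies. The same rule as a free function, with worked examples:

    fn min_interval(interval: u32, previous_interval: Option<u32>) -> u32 {
        let previous = previous_interval.unwrap_or(0);
        if interval > previous {
            // interval grew; don't let fuzz push it below previous+1
            previous + 1
        } else {
            // interval shrank; don't restrict negative fuzz
            0
        }
        .max(1)
    }

    fn main() {
        assert_eq!(min_interval(30, Some(20)), 21); // grew: floor sits just above previous
        assert_eq!(min_interval(10, Some(20)), 1); // shrank: only the 1-day floor
        assert_eq!(min_interval(5, None), 1); // no usable history: 1-day floor
    }
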

@@ -174,7 +174,7 @@ impl Collection {
         }
     }
 
-    let health_check_passed = if health_check {
+    let health_check_passed = if health_check && input.train_set.len() > 300 {
         let fsrs = FSRS::new(None)?;
         fsrs.evaluate_with_time_series_splits(input, |_| true)
             .ok()
@@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs(
         }))
         .collect_vec();
 
-    let skip = if training { 1 } else { 0 };
-    // Convert the remaining entries into separate FSRSItems, where each item
-    // contains all reviews done until then.
-    let items: Vec<(RevlogId, FSRSItem)> = entries
-        .iter()
-        .enumerate()
-        .skip(skip)
-        .map(|(outer_idx, entry)| {
-            let reviews = entries
-                .iter()
-                .take(outer_idx + 1)
-                .enumerate()
-                .map(|(inner_idx, r)| FSRSReview {
-                    rating: r.button_chosen as u32,
-                    delta_t: delta_ts[inner_idx],
-                })
-                .collect();
-            (entry.id, FSRSItem { reviews })
-        })
-        .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0)
-        .collect_vec();
+    let items = if training {
+        // Convert the remaining entries into separate FSRSItems, where each item
+        // contains all reviews done until then.
+        let mut items = Vec::with_capacity(entries.len());
+        let mut current_reviews = Vec::with_capacity(entries.len());
+        for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() {
+            current_reviews.push(FSRSReview {
+                rating: entry.button_chosen as u32,
+                delta_t,
+            });
+            if idx >= 1 && delta_t > 0 {
+                items.push((
+                    entry.id,
+                    FSRSItem {
+                        reviews: current_reviews.clone(),
+                    },
+                ));
+            }
+        }
+        items
+    } else {
+        // When not training, we only need the final FSRS item, which represents
+        // the complete history of the card. This avoids expensive clones in a loop.
+        let reviews = entries
+            .iter()
+            .zip(delta_ts.iter())
+            .map(|(entry, &delta_t)| FSRSReview {
+                rating: entry.button_chosen as u32,
+                delta_t,
+            })
+            .collect();
+        let last_entry = entries.last().unwrap();
+
+        vec![(last_entry.id, FSRSItem { reviews })]
+    };
 
     if items.is_empty() {
         None
     } else {
@@ -738,7 +753,7 @@ pub(crate) mod tests {
             ],
             false,
         ),
-        fsrs_items!([review(0)], [review(0), review(1)])
+        fsrs_items!([review(0), review(1)])
     );
 }
 
@@ -809,7 +824,7 @@ pub(crate) mod tests {
     // R | A X R
     assert_eq!(
         convert_ignore_before(revlogs, false, days_ago_ms(9)),
-        fsrs_items!([review(0)], [review(0), review(2)])
+        fsrs_items!([review(0), review(2)])
    );
 }
 
@@ -828,6 +843,9 @@ pub(crate) mod tests {
     assert_eq!(
         convert_ignore_before(revlogs, false, days_ago_ms(9))
             .unwrap()
+            .last()
+            .unwrap()
+            .reviews
             .len(),
         2
     );
@@ -849,6 +867,9 @@ pub(crate) mod tests {
     assert_eq!(
         convert_ignore_before(revlogs, false, days_ago_ms(9))
             .unwrap()
+            .last()
+            .unwrap()
+            .reviews
             .len(),
         2
     );
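
The rework above splits the two use cases: training still builds one item per review (cloning the growing prefix), while the non-training path now builds a single item carrying the card's complete history, turning an O(n^2) construction into O(n) and explaining why the tests now expect only the final item. A reduced sketch of the fast path (types simplified for illustration):

    struct Review { rating: u32, delta_t: u32 }
    struct Item { reviews: Vec<Review> }

    // One item with the full history, instead of n items each cloning
    // an ever-growing prefix of reviews.
    fn final_item_only(ratings: &[u32], delta_ts: &[u32]) -> Item {
        Item {
            reviews: ratings
                .iter()
                .zip(delta_ts)
                .map(|(&rating, &delta_t)| Review { rating, delta_t })
                .collect(),
        }
    }

    fn main() {
        let item = final_item_only(&[3, 4, 3], &[0, 1, 3]);
        assert_eq!(item.reviews.len(), 3); // complete history, built once
    }
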

@@ -115,13 +115,14 @@ impl Rescheduler {
     pub fn find_interval(
         &self,
         interval: f32,
-        minimum: u32,
-        maximum: u32,
+        minimum_interval: u32,
+        maximum_interval: u32,
         days_elapsed: u32,
         deckconfig_id: DeckConfigId,
         fuzz_seed: Option<u64>,
     ) -> Option<u32> {
-        let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum);
+        let (before_days, after_days) =
+            constrained_fuzz_bounds(interval, minimum_interval, maximum_interval);
 
         // Don't reschedule the card when it's overdue
         if after_days < days_elapsed {

@@ -97,7 +97,7 @@ fn create_review_priority_fn(
 
         // Interval-based ordering
        IntervalsAscending => wrap!(|c, _w| c.interval as i32),
-        IntervalsDescending => wrap!(|c, _w| -(c.interval as i32)),
+        IntervalsDescending => wrap!(|c, _w| (c.interval as i32).saturating_neg()),
        // Retrievability-based ordering
        RetrievabilityAscending => {
            wrap!(move |c, w| (c.retrievability(w) * 1000.0) as i32)

@@ -61,28 +61,26 @@ impl QueueBuilder {
     }
 
     fn gather_new_cards(&mut self, col: &mut Collection) -> Result<()> {
+        let salt = Self::knuth_salt(self.context.timing.days_elapsed);
         match self.context.sort_options.new_gather_priority {
             NewCardGatherPriority::Deck => {
                 self.gather_new_cards_by_deck(col, NewCardSorting::LowestPosition)
             }
-            NewCardGatherPriority::DeckThenRandomNotes => self.gather_new_cards_by_deck(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::DeckThenRandomNotes => {
+                self.gather_new_cards_by_deck(col, NewCardSorting::RandomNotes(salt))
+            }
             NewCardGatherPriority::LowestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::LowestPosition)
             }
             NewCardGatherPriority::HighestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::HighestPosition)
             }
-            NewCardGatherPriority::RandomNotes => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
-            NewCardGatherPriority::RandomCards => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomCards(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::RandomNotes => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomNotes(salt))
+            }
+            NewCardGatherPriority::RandomCards => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomCards(salt))
+            }
         }
     }
 
@@ -169,4 +167,10 @@ impl QueueBuilder {
             true
         }
     }
+
+    // Generates a salt for use with fnvhash. Useful to increase randomness
+    // when the base salt is a small integer.
+    fn knuth_salt(base_salt: u32) -> u32 {
+        base_salt.wrapping_mul(2654435761)
+    }
 }
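
The constant 2654435761 is the prime commonly used for Knuth's multiplicative hashing (close to 2^32 divided by the golden ratio), so small adjacent day counts map to well-spread salts instead of staying near zero. For instance:

    fn knuth_salt(base_salt: u32) -> u32 {
        base_salt.wrapping_mul(2654435761)
    }

    fn main() {
        // Small, adjacent inputs land far apart in u32 space.
        assert_eq!(knuth_salt(1), 2654435761);
        assert_eq!(knuth_salt(2), 1013904226); // wrapped around 2^32
    }
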

@@ -174,7 +174,7 @@ impl LoadBalancer {
         &self,
         note_id: Option<NoteId>,
         deckconfig_id: DeckConfigId,
-    ) -> LoadBalancerContext {
+    ) -> LoadBalancerContext<'_> {
         LoadBalancerContext {
             load_balancer: self,
             note_id,

@@ -6,6 +6,7 @@ use std::mem;
 use itertools::Itertools;
 
 use super::writer::write_nodes;
+use super::FieldSearchMode;
 use super::Node;
 use super::SearchNode;
 use super::StateKind;
@@ -174,7 +175,7 @@ impl SearchNode {
     pub fn from_tag_name(name: &str) -> Self {
         Self::Tag {
             tag: escape_anki_wildcards_for_search_node(name),
-            is_re: false,
+            mode: FieldSearchMode::Normal,
         }
     }
 

@@ -13,6 +13,7 @@ pub use builder::JoinSearches;
 pub use builder::Negated;
 pub use builder::SearchBuilder;
 pub use parser::parse as parse_search;
+pub use parser::FieldSearchMode;
 pub use parser::Node;
 pub use parser::PropertyKind;
 pub use parser::RatingKind;
@@ -226,7 +227,7 @@ impl Collection {
         &mut self,
         search: impl TryIntoSearch,
         mode: SortMode,
-    ) -> Result<CardTableGuard> {
+    ) -> Result<CardTableGuard<'_>> {
         let top_node = search.try_into_search()?;
         let writer = SqlWriter::new(self, ReturnItemType::Cards);
         let want_order = mode != SortMode::NoOrder;
@@ -299,7 +300,7 @@ impl Collection {
     pub(crate) fn search_notes_into_table(
         &mut self,
         search: impl TryIntoSearch,
-    ) -> Result<NoteTableGuard> {
+    ) -> Result<NoteTableGuard<'_>> {
         let top_node = search.try_into_search()?;
         let writer = SqlWriter::new(self, ReturnItemType::Notes);
         let mode = SortMode::NoOrder;
@@ -320,7 +321,7 @@ impl Collection {
 
     /// Place the ids of cards with notes in 'search_nids' into 'search_cids'.
     /// Returns number of added cards.
-    pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard> {
+    pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard<'_>> {
         self.storage.setup_searched_cards_table()?;
         let cards = self.storage.search_cards_of_notes_into_table()?;
         Ok(CardTableGuard { cards, col: self })

@@ -3,6 +3,7 @@
 
 use std::sync::LazyLock;
 
+use anki_proto::search::search_node::FieldSearchMode as FieldSearchModeProto;
 use nom::branch::alt;
 use nom::bytes::complete::escaped;
 use nom::bytes::complete::is_not;
@@ -27,7 +28,6 @@ use crate::error::ParseError;
 use crate::error::Result;
 use crate::error::SearchErrorKind as FailKind;
 use crate::prelude::*;
-
 type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
 type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
 
@@ -48,6 +48,23 @@ pub enum Node {
     Search(SearchNode),
 }
 
+#[derive(Copy, Debug, PartialEq, Eq, Clone)]
+pub enum FieldSearchMode {
+    Normal,
+    Regex,
+    NoCombining,
+}
+
+impl From<FieldSearchModeProto> for FieldSearchMode {
+    fn from(mode: FieldSearchModeProto) -> Self {
+        match mode {
+            FieldSearchModeProto::Normal => Self::Normal,
+            FieldSearchModeProto::Regex => Self::Regex,
+            FieldSearchModeProto::Nocombining => Self::NoCombining,
+        }
+    }
+}
+
 #[derive(Debug, PartialEq, Clone)]
 pub enum SearchNode {
     // text without a colon
@@ -56,7 +73,7 @@ pub enum SearchNode {
     SingleField {
         field: String,
         text: String,
-        is_re: bool,
+        mode: FieldSearchMode,
     },
     AddedInDays(u32),
     EditedInDays(u32),
@@ -77,7 +94,7 @@ pub enum SearchNode {
     },
     Tag {
         tag: String,
-        is_re: bool,
+        mode: FieldSearchMode,
     },
     Duplicates {
         notetype_id: NotetypeId,
@@ -158,7 +175,7 @@ pub fn parse(input: &str) -> Result<Vec<Node>> {
 
 /// Zero or more nodes inside brackets, eg 'one OR two -three'.
 /// Empty vec must be handled by caller.
-fn group_inner(input: &str) -> IResult<Vec<Node>> {
+fn group_inner(input: &str) -> IResult<'_, Vec<Node>> {
     let mut remaining = input;
     let mut nodes = vec![];
 
@@ -203,16 +220,16 @@ fn group_inner(input: &str) -> IResult<Vec<Node>> {
     Ok((remaining, nodes))
 }
 
-fn whitespace0(s: &str) -> IResult<Vec<char>> {
+fn whitespace0(s: &str) -> IResult<'_, Vec<char>> {
     many0(one_of(" \u{3000}")).parse(s)
 }
 
 /// Optional leading space, then a (negated) group or text
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node> {
     preceded(whitespace0, alt((negated_node, group, text))).parse(s)
 }
 
-fn negated_node(s: &str) -> IResult<Node> {
+fn negated_node(s: &str) -> IResult<'_, Node> {
     map(preceded(char('-'), alt((group, text))), |node| {
         Node::Not(Box::new(node))
     })
@@ -220,7 +237,7 @@ fn negated_node(s: &str) -> IResult<Node> {
 }
 
 /// One or more nodes surrounded by brackets, eg (one OR two)
-fn group(s: &str) -> IResult<Node> {
+fn group(s: &str) -> IResult<'_, Node> {
     let (opened, _) = char('(')(s)?;
     let (tail, inner) = group_inner(opened)?;
     if let Some(remaining) = tail.strip_prefix(')') {
@@ -235,18 +252,18 @@ fn group(s: &str) -> IResult<Node> {
 }
 
 /// Either quoted or unquoted text
-fn text(s: &str) -> IResult<Node> {
+fn text(s: &str) -> IResult<'_, Node> {
     alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s)
 }
 
 /// Quoted text, including the outer double quotes.
-fn quoted_term(s: &str) -> IResult<Node> {
+fn quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, term) = quoted_term_str(s)?;
     Ok((remaining, Node::Search(search_node_for_text(term)?)))
 }
 
 /// eg deck:"foo bar" - quotes must come after the :
-fn partially_quoted_term(s: &str) -> IResult<Node> {
+fn partially_quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, (key, val)) = separated_pair(
         escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")),
         char(':'),
@@ -260,7 +277,7 @@ fn partially_quoted_term(s: &str) -> IResult<Node> {
 }
 
 /// Unquoted text, terminated by whitespace or unescaped ", ( or )
-fn unquoted_term(s: &str) -> IResult<Node> {
+fn unquoted_term(s: &str) -> IResult<'_, Node> {
     match escaped(is_not("\"() \u{3000}\\"), '\\', none_of(" \u{3000}"))(s) {
         Ok((tail, term)) => {
             if term.is_empty() {
@@ -297,7 +314,7 @@ fn unquoted_term(s: &str) -> IResult<Node> {
 }
 
 /// Non-empty string delimited by unescaped double quotes.
-fn quoted_term_str(s: &str) -> IResult<&str> {
+fn quoted_term_str(s: &str) -> IResult<'_, &str> {
     let (opened, _) = char('"')(s)?;
     if let Ok((tail, inner)) =
         escaped::<_, ParseError, _, _>(is_not(r#""\"#), '\\', anychar).parse(opened)
@@ -321,7 +338,7 @@ fn quoted_term_str(s: &str) -> IResult<&str> {
 
 /// Determine if text is a qualified search, and handle escaped chars.
 /// Expect well-formed input: unempty and no trailing \.
-fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
+fn search_node_for_text(s: &str) -> ParseResult<'_, SearchNode> {
     // leading : is only possible error for well-formed input
     let (tail, head) = verify(escaped(is_not(r":\"), '\\', anychar), |t: &str| {
         !t.is_empty()
@@ -369,21 +386,26 @@ fn search_node_for_text_with_argument<'a>(
     })
 }
 
-fn parse_tag(s: &str) -> ParseResult<SearchNode> {
+fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(if let Some(re) = s.strip_prefix("re:") {
         SearchNode::Tag {
             tag: unescape_quotes(re),
-            is_re: true,
+            mode: FieldSearchMode::Regex,
         }
+    } else if let Some(nc) = s.strip_prefix("nc:") {
+        SearchNode::Tag {
+            tag: unescape(nc)?,
+            mode: FieldSearchMode::NoCombining,
+        }
     } else {
         SearchNode::Tag {
             tag: unescape(s)?,
-            is_re: false,
+            mode: FieldSearchMode::Normal,
         }
     })
 }
 
-fn parse_template(s: &str) -> ParseResult<SearchNode> {
+fn parse_template(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(SearchNode::CardTemplate(match s.parse::<u16>() {
         Ok(n) => TemplateKind::Ordinal(n.max(1) - 1),
         Err(_) => TemplateKind::Name(unescape(s)?),
@@ -391,7 +413,7 @@ fn parse_template(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// flag:0-7
-fn parse_flag(s: &str) -> ParseResult<SearchNode> {
+fn parse_flag(s: &str) -> ParseResult<'_, SearchNode> {
     if let Ok(flag) = s.parse::<u8>() {
         if flag > 7 {
             Err(parse_failure(s, FailKind::InvalidFlag))
@@ -404,7 +426,7 @@ fn parse_flag(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg resched:3
-fn parse_resched(s: &str) -> ParseResult<SearchNode> {
+fn parse_resched(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "resched:").map(|days| SearchNode::Rated {
         days,
         ease: RatingKind::ManualReschedule,
@@ -412,7 +434,7 @@ fn parse_resched(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg prop:ivl>3, prop:ease!=2.5
-fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
+fn parse_prop(prop_clause: &str) -> ParseResult<'_, SearchNode> {
     let (tail, prop) = alt((
         tag("ivl"),
         tag("due"),
@@ -580,23 +602,23 @@ fn parse_prop_rated<'a>(num: &str, context: &'a str) -> ParseResult<'a, Property
 }
 
 /// eg added:1
-fn parse_added(s: &str) -> ParseResult<SearchNode> {
+fn parse_added(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "added:").map(|n| SearchNode::AddedInDays(n.max(1)))
 }
 
 /// eg edited:1
-fn parse_edited(s: &str) -> ParseResult<SearchNode> {
+fn parse_edited(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "edited:").map(|n| SearchNode::EditedInDays(n.max(1)))
 }
 
 /// eg introduced:1
-fn parse_introduced(s: &str) -> ParseResult<SearchNode> {
+fn parse_introduced(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "introduced:").map(|n| SearchNode::IntroducedInDays(n.max(1)))
 }
 
 /// eg rated:3 or rated:10:2
 /// second arg must be between 1-4
-fn parse_rated(s: &str) -> ParseResult<SearchNode> {
+fn parse_rated(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ':');
     let days = parse_u32(it.next().unwrap(), "rated:")?.max(1);
     let button = parse_answer_button(it.next(), s)?;
@@ -604,7 +626,7 @@ fn parse_rated(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg is:due
-fn parse_state(s: &str) -> ParseResult<SearchNode> {
+fn parse_state(s: &str) -> ParseResult<'_, SearchNode> {
     use StateKind::*;
     Ok(SearchNode::State(match s {
         "new" => New,
@@ -624,7 +646,7 @@ fn parse_state(s: &str) -> ParseResult<SearchNode> {
     }))
 }
 
-fn parse_mid(s: &str) -> ParseResult<SearchNode> {
+fn parse_mid(s: &str) -> ParseResult<'_, SearchNode> {
     parse_i64(s, "mid:").map(|n| SearchNode::NotetypeId(n.into()))
 }
 
@@ -646,7 +668,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
 }
 
 /// eg dupe:1231,hello
-fn parse_dupe(s: &str) -> ParseResult<SearchNode> {
+fn parse_dupe(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ',');
     let ntid = parse_i64(it.next().unwrap(), s)?;
     if let Some(text) = it.next() {
@@ -670,13 +692,19 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
         SearchNode::SingleField {
             field: unescape(key)?,
             text: unescape_quotes(stripped),
-            is_re: true,
+            mode: FieldSearchMode::Regex,
+        }
+    } else if let Some(stripped) = val.strip_prefix("nc:") {
+        SearchNode::SingleField {
+            field: unescape(key)?,
+            text: unescape_quotes(stripped),
+            mode: FieldSearchMode::NoCombining,
         }
     } else {
         SearchNode::SingleField {
             field: unescape(key)?,
             text: unescape(val)?,
-            is_re: false,
+            mode: FieldSearchMode::Normal,
         }
     })
 }
@@ -700,7 +728,7 @@ fn unescape_quotes_and_backslashes(s: &str) -> String {
 }
 
 /// Unescape chars with special meaning to the parser.
-fn unescape(txt: &str) -> ParseResult<String> {
+fn unescape(txt: &str) -> ParseResult<'_, String> {
     if let Some(seq) = invalid_escape_sequence(txt) {
         Err(parse_failure(
             txt,
@@ -806,7 +834,7 @@ mod test {
                 Search(SingleField {
                     field: "foo".into(),
                     text: "bar baz".into(),
-                    is_re: false,
+                    mode: FieldSearchMode::Normal,
                 })
             ]))),
             Or,
@@ -819,7 +847,16 @@ mod test {
             vec![Search(SingleField {
                 field: "foo".into(),
                 text: "bar".into(),
-                is_re: true
+                mode: FieldSearchMode::Regex,
             })]
         );
 
+        assert_eq!(
+            parse("foo:nc:bar")?,
+            vec![Search(SingleField {
+                field: "foo".into(),
+                text: "bar".into(),
+                mode: FieldSearchMode::NoCombining,
+            })]
+        );
+
@@ -829,7 +866,7 @@ mod test {
             vec![Search(SingleField {
                 field: "field".into(),
                 text: "va\"lue".into(),
-                is_re: false
+                mode: FieldSearchMode::Normal,
             })]
         );
         assert_eq!(parse(r#""field:va\"lue""#)?, parse(r#"field:"va\"lue""#)?,);
@@ -906,14 +943,14 @@ mod test {
             parse("tag:hard")?,
             vec![Search(Tag {
                 tag: "hard".into(),
-                is_re: false
+                mode: FieldSearchMode::Normal
             })]
         );
         assert_eq!(
             parse(r"tag:re:\\")?,
             vec![Search(Tag {
                 tag: r"\\".into(),
-                is_re: true
+                mode: FieldSearchMode::Regex
             })]
        );
        assert_eq!(
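
With these parser changes, field and tag searches have three modes selected by an optional prefix: none (Normal), re: (Regex), and the new nc: (NoCombining). The dispatch is plain prefix stripping, as in this reduced sketch:

    #[derive(Debug, PartialEq)]
    enum FieldSearchMode { Normal, Regex, NoCombining }

    // Reduced version of the prefix dispatch in parse_tag/parse_single_field.
    fn mode_of(val: &str) -> (FieldSearchMode, &str) {
        if let Some(rest) = val.strip_prefix("re:") {
            (FieldSearchMode::Regex, rest)
        } else if let Some(rest) = val.strip_prefix("nc:") {
            (FieldSearchMode::NoCombining, rest)
        } else {
            (FieldSearchMode::Normal, val)
        }
    }

    fn main() {
        assert_eq!(mode_of("nc:bar"), (FieldSearchMode::NoCombining, "bar"));
        assert_eq!(mode_of("re:b.r"), (FieldSearchMode::Regex, "b.r"));
        assert_eq!(mode_of("bar"), (FieldSearchMode::Normal, "bar"));
    }
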

@@ -6,6 +6,7 @@ use itertools::Itertools;
 
 use crate::prelude::*;
 use crate::search::parse_search;
+use crate::search::FieldSearchMode;
 use crate::search::Negated;
 use crate::search::Node;
 use crate::search::PropertyKind;
@@ -40,7 +41,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
             Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
                 field: escape_anki_wildcards_for_search_node(&s),
                 text: "_*".to_string(),
-                is_re: false,
+                mode: FieldSearchMode::Normal,
             }),
             Filter::Rated(rated) => Node::Search(SearchNode::Rated {
                 days: rated.days,
@@ -107,7 +108,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
             Filter::Field(field) => Node::Search(SearchNode::SingleField {
                 field: escape_anki_wildcards(&field.field_name),
                 text: escape_anki_wildcards(&field.text),
-                is_re: field.is_re,
+                mode: field.mode().into(),
             }),
             Filter::LiteralText(text) => {
                 let text = escape_anki_wildcards(&text);

@@ -7,6 +7,7 @@ use std::ops::Range;
 
 use itertools::Itertools;
 
+use super::parser::FieldSearchMode;
 use super::parser::Node;
 use super::parser::PropertyKind;
 use super::parser::RatingKind;
@@ -138,8 +139,8 @@ impl SqlWriter<'_> {
                     false,
                 )?
             }
-            SearchNode::SingleField { field, text, is_re } => {
-                self.write_field(&norm(field), &self.norm_note(text), *is_re)?
+            SearchNode::SingleField { field, text, mode } => {
+                self.write_field(&norm(field), &self.norm_note(text), *mode)?
             }
             SearchNode::Duplicates { notetype_id, text } => {
                 self.write_dupe(*notetype_id, &self.norm_note(text))?
@@ -180,7 +181,7 @@ impl SqlWriter<'_> {
             SearchNode::Notetype(notetype) => self.write_notetype(&norm(notetype)),
             SearchNode::Rated { days, ease } => self.write_rated(">", -i64::from(*days), ease)?,
 
-            SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re),
+            SearchNode::Tag { tag, mode } => self.write_tag(&norm(tag), *mode),
             SearchNode::State(state) => self.write_state(state)?,
             SearchNode::Flag(flag) => {
                 write!(self.sql, "(c.flags & 7) == {flag}").unwrap();
@@ -296,8 +297,8 @@ impl SqlWriter<'_> {
         Ok(())
     }
 
-    fn write_tag(&mut self, tag: &str, is_re: bool) {
-        if is_re {
+    fn write_tag(&mut self, tag: &str, mode: FieldSearchMode) {
+        if mode == FieldSearchMode::Regex {
             self.args.push(format!("(?i){tag}"));
             write!(self.sql, "regexp_tags(?{}, n.tags)", self.args.len()).unwrap();
         } else {
@@ -310,8 +311,19 @@ impl SqlWriter<'_> {
             }
             s if s.contains(' ') => write!(self.sql, "false").unwrap(),
             text => {
-                write!(self.sql, "n.tags regexp ?").unwrap();
-                let re = &to_custom_re(text, r"\S");
+                let text = if mode == FieldSearchMode::Normal {
+                    write!(self.sql, "n.tags regexp ?").unwrap();
+                    Cow::from(text)
+                } else {
+                    write!(
+                        self.sql,
+                        "coalesce(process_text(n.tags, {}), n.tags) regexp ?",
+                        ProcessTextFlags::NoCombining.bits()
+                    )
+                    .unwrap();
+                    without_combining(text)
+                };
+                let re = &to_custom_re(&text, r"\S");
                 self.args.push(format!("(?i).* {re}(::| ).*"));
             }
         }
@@ -567,16 +579,18 @@ impl SqlWriter<'_> {
         }
     }
 
-    fn write_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
+    fn write_field(&mut self, field_name: &str, val: &str, mode: FieldSearchMode) -> Result<()> {
         if matches!(field_name, "*" | "_*" | "*_") {
-            if is_re {
+            if mode == FieldSearchMode::Regex {
                 self.write_all_fields_regexp(val);
             } else {
                 self.write_all_fields(val);
             }
             Ok(())
-        } else if is_re {
+        } else if mode == FieldSearchMode::Regex {
             self.write_single_field_regexp(field_name, val)
+        } else if mode == FieldSearchMode::NoCombining {
+            self.write_single_field_nc(field_name, val)
         } else {
             self.write_single_field(field_name, val)
        }
@@ -592,6 +606,58 @@ impl SqlWriter<'_> {
         write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap();
     }
 
+    fn write_single_field_nc(&mut self, field_name: &str, val: &str) -> Result<()> {
+        let field_indicies_by_notetype = self.num_fields_and_fields_indices_by_notetype(
+            field_name,
+            matches!(val, "*" | "_*" | "*_"),
+        )?;
+        if field_indicies_by_notetype.is_empty() {
+            write!(self.sql, "false").unwrap();
+            return Ok(());
+        }
+
+        let val = to_sql(val);
+        let val = without_combining(&val);
+        self.args.push(val.into());
+        let arg_idx = self.args.len();
+        let field_idx_str = format!("' || ?{arg_idx} || '");
+        let other_idx_str = "%".to_string();
+
+        let notetype_clause = |ctx: &FieldQualifiedSearchContext| -> String {
+            let field_index_clause = |range: &Range<u32>| {
+                let f = (0..ctx.total_fields_in_note)
+                    .filter_map(|i| {
+                        if i as u32 == range.start {
+                            Some(&field_idx_str)
+                        } else if range.contains(&(i as u32)) {
+                            None
+                        } else {
+                            Some(&other_idx_str)
+                        }
+                    })
+                    .join("\x1f");
+                format!(
+                    "coalesce(process_text(n.flds, {}), n.flds) like '{f}' escape '\\'",
+                    ProcessTextFlags::NoCombining.bits()
+                )
+            };
+
+            let all_field_clauses = ctx
+                .field_ranges_to_search
+                .iter()
+                .map(field_index_clause)
+                .join(" or ");
+            format!("(n.mid = {mid} and ({all_field_clauses}))", mid = ctx.ntid)
+        };
+        let all_notetype_clauses = field_indicies_by_notetype
+            .iter()
+            .map(notetype_clause)
+            .join(" or ");
+        write!(self.sql, "({all_notetype_clauses})").unwrap();
+
+        Ok(())
+    }
+
     fn write_single_field_regexp(&mut self, field_name: &str, val: &str) -> Result<()> {
         let field_indicies_by_notetype = self.fields_indices_by_notetype(field_name)?;
         if field_indicies_by_notetype.is_empty() {
@@ -1116,6 +1182,20 @@ mod test {
                 vec!["(?i)te.*st".into()]
             )
         );
+        // field search with no-combine
+        assert_eq!(
+            s(ctx, "front:nc:frânçais"),
+            (
+                concat!(
+                    "(((n.mid = 1581236385344 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385345 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385346 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385347 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\'))))"
+                )
+                .into(),
+                vec!["francais".into()]
+            )
+        );
         // all field search
         assert_eq!(
             s(ctx, "*:te*st"),
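
In NoCombining mode both the query text and the stored text are folded before matching; judging by the test above, without_combining reduces "frânçais" to "francais", i.e. it strips combining marks after Unicode decomposition. A sketch of that folding idea, assuming the unicode-normalization crate (the helper below is an illustrative stand-in, not the real implementation):

    use unicode_normalization::UnicodeNormalization;

    // Illustrative stand-in for without_combining(): NFKD-decompose, then
    // drop combining marks (U+0300..U+036F covers the common diacritics).
    fn fold_diacritics(s: &str) -> String {
        s.nfkd()
            .filter(|c| !('\u{300}'..='\u{36f}').contains(c))
            .collect()
    }

    fn main() {
        assert_eq!(fold_diacritics("frânçais"), "francais");
    }
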

@@ -9,6 +9,7 @@ use regex::Regex;
 use crate::notetype::NotetypeId as NotetypeIdType;
 use crate::prelude::*;
 use crate::search::parser::parse;
+use crate::search::parser::FieldSearchMode;
 use crate::search::parser::Node;
 use crate::search::parser::PropertyKind;
 use crate::search::parser::RatingKind;
@@ -69,7 +70,7 @@ fn write_search_node(node: &SearchNode) -> String {
     use SearchNode::*;
     match node {
         UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
-        SingleField { field, text, is_re } => write_single_field(field, text, *is_re),
+        SingleField { field, text, mode } => write_single_field(field, text, *mode),
         AddedInDays(u) => format!("added:{u}"),
         EditedInDays(u) => format!("edited:{u}"),
         IntroducedInDays(u) => format!("introduced:{u}"),
@@ -81,7 +82,7 @@ fn write_search_node(node: &SearchNode) -> String {
         NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"),
         Notetype(s) => maybe_quote(&format!("note:{s}")),
         Rated { days, ease } => write_rated(days, ease),
-        Tag { tag, is_re } => write_single_field("tag", tag, *is_re),
+        Tag { tag, mode } => write_single_field("tag", tag, *mode),
         Duplicates { notetype_id, text } => write_dupe(notetype_id, text),
         State(k) => write_state(k),
         Flag(u) => format!("flag:{u}"),
@@ -116,14 +117,25 @@ fn needs_quotation(txt: &str) -> bool {
 }
 
 /// Also used by tag search, which has the same syntax.
-fn write_single_field(field: &str, text: &str, is_re: bool) -> String {
-    let re = if is_re { "re:" } else { "" };
-    let text = if !is_re && text.starts_with("re:") {
+fn write_single_field(field: &str, text: &str, mode: FieldSearchMode) -> String {
+    let prefix = match mode {
+        FieldSearchMode::Normal => "",
+        FieldSearchMode::Regex => "re:",
+        FieldSearchMode::NoCombining => "nc:",
+    };
+    let text = if mode == FieldSearchMode::Normal
+        && (text.starts_with("re:") || text.starts_with("nc:"))
+    {
         text.replacen(':', "\\:", 1)
     } else {
         text.to_string()
    };
-    maybe_quote(&format!("{}:{}{}", field.replace(':', "\\:"), re, &text))
+    maybe_quote(&format!(
+        "{}:{}{}",
+        field.replace(':', "\\:"),
+        prefix,
+        &text
+    ))
 }
 
 fn write_template(template: &TemplateKind) -> String {
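
write_single_field is the inverse of the parser: each mode maps back to its prefix ("", "re:", "nc:"), and normal text that merely starts with "re:" or "nc:" gets its first colon escaped so re-parsing yields the same node. A reduced sketch of that round-trip logic:

    enum FieldSearchMode { Normal, Regex, NoCombining }

    // Mirror of the prefix mapping in write_single_field above.
    fn prefix(mode: &FieldSearchMode) -> &'static str {
        match mode {
            FieldSearchMode::Normal => "",
            FieldSearchMode::Regex => "re:",
            FieldSearchMode::NoCombining => "nc:",
        }
    }

    fn main() {
        assert_eq!(prefix(&FieldSearchMode::NoCombining), "nc:");
        // Normal text that literally starts with "nc:" must be escaped:
        let text = "nc:literal";
        assert_eq!(text.replacen(':', "\\:", 1), "nc\\:literal");
    }
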