Merge branch 'main' into accessibility-color-blind
Commit 482cab50d4: 62 changed files with 1058 additions and 823 deletions.
@@ -241,6 +241,10 @@ Kevin Nakamura <grinkers@grinkers.net>
 Bradley Szoke <bradleyszoke@gmail.com>
 jcznk <https://github.com/jcznk>
 Thomas Rixen <thomas.rixen@student.uclouvain.be>
+Siyuan Mattuwu Yan <syan4@ualberta.ca>
+Lee Doughty <32392044+leedoughty@users.noreply.github.com>
+memchr <memchr@proton.me>
+Aldlss <ayaldlss@gmail.com>
 
 ********************
 
Cargo.lock (generated, 1062 lines changed): file diff suppressed because it is too large.
@@ -133,7 +133,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
 tower-http = { version = "0.6.6", features = ["trace"] }
 tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
 tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
+tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
 unic-langid = { version = "0.9.6", features = ["macros"] }
 unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"
@@ -49,6 +49,46 @@ pub trait BuildAction {
 }
 
     fn name(&self) -> &'static str {
-        std::any::type_name::<Self>().split("::").last().unwrap()
+        std::any::type_name::<Self>()
+            .split("::")
+            .last()
+            .unwrap()
+            .split('<')
+            .next()
+            .unwrap()
     }
 }
 
+#[cfg(test)]
+trait TestBuildAction {}
+
+#[cfg(test)]
+impl<T: TestBuildAction + ?Sized> BuildAction for T {
+    fn command(&self) -> &str {
+        "test"
+    }
+    fn files(&mut self, _build: &mut impl FilesHandle) {}
+}
+
+#[allow(dead_code, unused_variables)]
+#[test]
+fn should_strip_regions_in_type_name() {
+    struct Bare;
+    impl TestBuildAction for Bare {}
+    assert_eq!(Bare {}.name(), "Bare");
+
+    struct WithLifeTime<'a>(&'a str);
+    impl TestBuildAction for WithLifeTime<'_> {}
+    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
+
+    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
+    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
+    assert_eq!(
+        WithMultiLifeTime("test", "test").name(),
+        "WithMultiLifeTime"
+    );
+
+    struct WithGeneric<T>(T);
+    impl<T> TestBuildAction for WithGeneric<T> {}
+    assert_eq!(WithGeneric(3).name(), "WithGeneric");
+}
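The new `name()` additionally strips generic and lifetime arguments from the output of `std::any::type_name`. A standalone sketch of the same chain (the `Wrapper` type is hypothetical; `type_name`'s exact format is not guaranteed by the language, which is presumably why the hunk above adds tests):

    #[allow(dead_code)]
    struct Wrapper<T>(T);

    fn short_name<T>() -> &'static str {
        // e.g. "example::Wrapper<i32>" -> "Wrapper<i32>" -> "Wrapper"
        std::any::type_name::<T>()
            .split("::")
            .last()
            .unwrap()
            .split('<')
            .next()
            .unwrap()
    }

    fn main() {
        assert_eq!(short_name::<Wrapper<i32>>(), "Wrapper");
    }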
@@ -67,7 +67,7 @@ impl Platform {
 }
 
 /// Append .exe to path if on Windows.
-pub fn with_exe(path: &str) -> Cow<str> {
+pub fn with_exe(path: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{path}.exe").into()
     } else {

@@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
 }
 }
 
-fn with_cmd_ext(bin: &str) -> Cow<str> {
+fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{bin}.cmd").into()
     } else {
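These `Cow<str>` to `Cow<'_, str>` changes (and the matching ones in the rslib hunks further down) only spell out the elided lifetime; behavior is unchanged. Which lint motivated the sweep is an assumption, but newer rustc toolchains warn when a borrowed return type hides its lifetime. A minimal sketch of the two spellings:

    use std::borrow::Cow;

    // Lifetime hidden: compiles, but lints such as
    // elided_lifetimes_in_paths flag the invisible borrow.
    #[allow(elided_lifetimes_in_paths)]
    fn with_exe_implicit(path: &str) -> Cow<str> {
        if cfg!(windows) { format!("{path}.exe").into() } else { path.into() }
    }

    // Borrow made visible: the return value may borrow from `path`.
    fn with_exe_explicit(path: &str) -> Cow<'_, str> {
        if cfg!(windows) { format!("{path}.exe").into() } else { path.into() }
    }

    fn main() {
        assert_eq!(with_exe_implicit("anki"), with_exe_explicit("anki"));
    }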
@@ -599,6 +599,22 @@
     "name": "colored",
     "repository": "https://github.com/mackwic/colored"
   },
+  {
+    "authors": "Wim Looman <wim@nemo157.com>|Allen Bui <fairingrey@gmail.com>",
+    "description": "Adaptors for various compression algorithms.",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "compression-codecs",
+    "repository": "https://github.com/Nullus157/async-compression"
+  },
+  {
+    "authors": "Wim Looman <wim@nemo157.com>|Allen Bui <fairingrey@gmail.com>",
+    "description": "Abstractions for compression algorithms.",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "compression-core",
+    "repository": "https://github.com/Nullus157/async-compression"
+  },
   {
     "authors": "Stjepan Glavina <stjepang@gmail.com>|Taiki Endo <te316e89@gmail.com>|John Nunley <dev@notgull.net>",
     "description": "Concurrent multi-producer multi-consumer queue",

@@ -1759,6 +1775,14 @@
     "name": "http-body-util",
     "repository": "https://github.com/hyperium/http-body"
   },
+  {
+    "authors": null,
+    "description": "No-dep range header parser",
+    "license": "MIT",
+    "license_file": null,
+    "name": "http-range-header",
+    "repository": "https://github.com/MarcusGrass/parse-range-headers"
+  },
   {
     "authors": "Sean McArthur <sean@seanmonstar.com>",
     "description": "A tiny, safe, speedy, zero-copy HTTP/1.x parser.",

@@ -1943,6 +1967,14 @@
     "name": "intl_pluralrules",
     "repository": "https://github.com/zbraniecki/pluralrules"
   },
+  {
+    "authors": "quininer <quininer@live.com>",
+    "description": "The low-level `io_uring` userspace interface for Rust",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "io-uring",
+    "repository": "https://github.com/tokio-rs/io-uring"
+  },
   {
     "authors": "Kris Price <kris@krisprice.nz>",
     "description": "Provides types and useful methods for working with IPv4 and IPv6 network addresses, commonly called IP prefixes. The new `IpNet`, `Ipv4Net`, and `Ipv6Net` types build on the existing `IpAddr`, `Ipv4Addr`, and `Ipv6Addr` types already provided in Rust's standard library and align to their design to stay consistent. The module also provides useful traits that extend `Ipv4Addr` and `Ipv6Addr` with methods for `Add`, `Sub`, `BitAnd`, and `BitOr` operations. The module only uses stable feature so it is guaranteed to compile using the stable toolchain.",

@@ -2168,7 +2200,7 @@
     "repository": "https://github.com/servo/html5ever"
   },
   {
-    "authors": null,
+    "authors": "The html5ever Project Developers",
     "description": "Procedural macro for html5ever.",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,

@@ -2567,14 +2599,6 @@
     "name": "ordered-float",
     "repository": "https://github.com/reem/rust-ordered-float"
   },
-  {
-    "authors": "Daniel Salvadori <danaugrs@gmail.com>",
-    "description": "Provides a macro to simplify operator overloading.",
-    "license": "MIT",
-    "license_file": null,
-    "name": "overload",
-    "repository": "https://github.com/danaugrs/overload"
-  },
   {
     "authors": "Stjepan Glavina <stjepang@gmail.com>|The Rust Project Developers",
     "description": "Thread parking and unparking",

@@ -3040,7 +3064,7 @@
     "repository": "https://github.com/bluss/rawpointer/"
   },
   {
-    "authors": "Niko Matsakis <niko@alum.mit.edu>|Josh Stone <cuviper@gmail.com>",
+    "authors": null,
     "description": "Simple work-stealing parallelism for Rust",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,

@@ -3048,7 +3072,7 @@
     "repository": "https://github.com/rayon-rs/rayon"
   },
   {
-    "authors": "Niko Matsakis <niko@alum.mit.edu>|Josh Stone <cuviper@gmail.com>",
+    "authors": null,
     "description": "Core APIs for Rayon",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,

@@ -3095,28 +3119,12 @@
     "name": "regex",
     "repository": "https://github.com/rust-lang/regex"
   },
-  {
-    "authors": "Andrew Gallant <jamslam@gmail.com>",
-    "description": "Automata construction and matching using regular expressions.",
-    "license": "MIT OR Unlicense",
-    "license_file": null,
-    "name": "regex-automata",
-    "repository": "https://github.com/BurntSushi/regex-automata"
-  },
   {
     "authors": "The Rust Project Developers|Andrew Gallant <jamslam@gmail.com>",
     "description": "Automata construction and matching using regular expressions.",
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
     "name": "regex-automata",
-    "repository": "https://github.com/rust-lang/regex/tree/master/regex-automata"
-  },
-  {
-    "authors": "The Rust Project Developers",
-    "description": "A regular expression parser.",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "regex-syntax",
     "repository": "https://github.com/rust-lang/regex"
   },
   {

@@ -3125,7 +3133,7 @@
     "license": "Apache-2.0 OR MIT",
     "license_file": null,
     "name": "regex-syntax",
-    "repository": "https://github.com/rust-lang/regex/tree/master/regex-syntax"
+    "repository": "https://github.com/rust-lang/regex"
   },
   {
     "authors": "John-John Tedro <udoprog@tedro.se>",

@@ -3455,14 +3463,6 @@
     "name": "serde_repr",
     "repository": "https://github.com/dtolnay/serde-repr"
   },
-  {
-    "authors": null,
-    "description": "Serde-compatible spanned Value",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "serde_spanned",
-    "repository": "https://github.com/toml-rs/toml"
-  },
   {
     "authors": "Jacob Brown <kardeiz@gmail.com>",
     "description": "De/serialize structs with named fields as array of values",

@@ -3711,14 +3711,6 @@
     "name": "syn",
     "repository": "https://github.com/dtolnay/syn"
   },
-  {
-    "authors": "David Tolnay <dtolnay@gmail.com>",
-    "description": "Parser for Rust source code",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "syn",
-    "repository": "https://github.com/dtolnay/syn"
-  },
   {
     "authors": "Actyx AG <developer@actyx.io>",
     "description": "A tool for enlisting the compiler's help in proving the absence of concurrency",

@@ -3927,6 +3919,14 @@
     "name": "tokio-rustls",
     "repository": "https://github.com/rustls/tokio-rustls"
   },
+  {
+    "authors": "Daniel Abramov <dabramov@snapview.de>|Alexey Galakhov <agalakhov@snapview.de>",
+    "description": "Tokio binding for Tungstenite, the Lightweight stream-based WebSocket implementation",
+    "license": "MIT",
+    "license_file": null,
+    "name": "tokio-tungstenite",
+    "repository": "https://github.com/snapview/tokio-tungstenite"
+  },
   {
     "authors": "Tokio Contributors <team@tokio.rs>",
     "description": "Additional utilities for working with Tokio.",

@@ -3951,14 +3951,6 @@
     "name": "toml_edit",
     "repository": "https://github.com/toml-rs/toml"
   },
-  {
-    "authors": null,
-    "description": "A low-level interface for writing out TOML",
-    "license": "Apache-2.0 OR MIT",
-    "license_file": null,
-    "name": "toml_write",
-    "repository": "https://github.com/toml-rs/toml"
-  },
   {
     "authors": "Tower Maintainers <team@tower-rs.com>",
     "description": "Tower is a library of modular and reusable components for building robust clients and servers.",

@@ -4047,6 +4039,14 @@
     "name": "try-lock",
     "repository": "https://github.com/seanmonstar/try-lock"
   },
+  {
+    "authors": "Alexey Galakhov|Daniel Abramov",
+    "description": "Lightweight stream-based WebSocket implementation",
+    "license": "Apache-2.0 OR MIT",
+    "license_file": null,
+    "name": "tungstenite",
+    "repository": "https://github.com/snapview/tungstenite-rs"
+  },
   {
     "authors": "Jacob Brown <kardeiz@gmail.com>",
     "description": "Provides a typemap container with FxHashMap",

@@ -4920,11 +4920,11 @@
     "repository": "https://github.com/LukeMathWalker/wiremock-rs"
   },
   {
-    "authors": null,
+    "authors": "Alex Crichton <alex@alexcrichton.com>",
-    "description": "Runtime support for the `wit-bindgen` crate",
+    "description": "Rust bindings generator and runtime support for WIT and the component model. Used when compiling Rust programs to the component model.",
     "license": "Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT",
     "license_file": null,
-    "name": "wit-bindgen-rt",
+    "name": "wit-bindgen",
     "repository": "https://github.com/bytecodealliance/wit-bindgen"
   },
   {
@@ -1 +1 @@
-Subproject commit a599715d3c27ff2eb895c749f3534ab73d83dad1
+Subproject commit 5897ef3a4589c123b7fa4c7fbd67f84d0b7ee13e
@@ -384,8 +384,6 @@ deck-config-which-deck = Which deck would you like to display options for?
 deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
 deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
 deck-config-not-enough-history = Insufficient review history to perform this operation.
-deck-config-unable-to-determine-desired-retention =
-    Unable to determine a minimum recommended retention.
 deck-config-must-have-400-reviews =
     { $count ->
         [one] Only { $count } review was found.

@@ -394,7 +392,6 @@ deck-config-must-have-400-reviews =
 # Numbers that control how aggressively the FSRS algorithm schedules cards
 deck-config-weights = FSRS parameters
 deck-config-compute-optimal-weights = Optimize FSRS parameters
-deck-config-compute-minimum-recommended-retention = Minimum recommended retention
 deck-config-optimize-button = Optimize Current Preset
 # Indicates that a given function or label, provided via the "text" variable, operates slowly.
 deck-config-slow-suffix = { $text } (slow)

@@ -407,7 +404,6 @@ deck-config-historical-retention = Historical retention
 deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
 deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
 deck-config-get-params = Get Params
-deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
 deck-config-complete = { $num }% complete.
 deck-config-iterations = Iteration: { $count }...
 deck-config-reschedule-cards-on-change = Reschedule cards on change

@@ -468,12 +464,7 @@ deck-config-compute-optimal-weights-tooltip2 =
     By default, parameters will be calculated from the review history of all decks using the current preset. You can
    optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
    optimizing the parameters.
-deck-config-compute-optimal-retention-tooltip4 =
-    This tool will attempt to find the desired retention value
-    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
-    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
-    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
-    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
 deck-config-please-save-your-changes-first = Please save your changes first.
 deck-config-workload-factor-change = Approximate workload: {$factor}x
     (compared to {$previousDR}% desired retention)

@@ -533,7 +524,7 @@ deck-config-health-check = Check health when optimizing
 deck-config-fsrs-bad-fit-warning = Health Check:
     Your memory is difficult for FSRS to predict. Recommendations:
 
-    - Suspend or reformulate leeches.
+    - Suspend or reformulate any cards you constantly forget.
     - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
     - Understand before you memorize.
 

@@ -544,6 +535,16 @@ deck-config-fsrs-good-fit = Health Check:
 
 ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
 
+deck-config-unable-to-determine-desired-retention =
+    Unable to determine a minimum recommended retention.
+deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
+deck-config-compute-minimum-recommended-retention = Minimum recommended retention
+deck-config-compute-optimal-retention-tooltip4 =
+    This tool will attempt to find the desired retention value
+    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
+    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
+    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
+    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
 deck-config-plotted-on-x-axis = (Plotted on the X-axis)
 deck-config-a-100-day-interval =
     { $days ->
@@ -1 +1 @@
-Subproject commit bb4207f3b8e9a7c428db282d12c75b850be532f3
+Subproject commit dad4e2736a2b53dcdb52d79b5703dd464c05d666
@@ -82,6 +82,7 @@
   "resolutions": {
     "canvas": "npm:empty-npm-package@1.0.0",
     "cookie": "0.7.0",
+    "devalue": "^5.3.2",
    "vite": "6"
   },
   "browserslist": [
@@ -175,8 +175,8 @@ class MnemoFact:
     def fact_view(self) -> type[MnemoFactView]:
         try:
             fact_view = self.cards[0].fact_view_id
-        except IndexError as err:
-            raise Exception(f"Fact {id} has no cards") from err
+        except IndexError:
+            return FrontOnly
 
         if fact_view.startswith("1.") or fact_view.startswith("1::"):
             return FrontOnly

@@ -187,7 +187,7 @@ class MnemoFact:
         elif fact_view.startswith("5.1"):
             return Cloze
 
-        raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
+        raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")
 
     def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
         return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]
Binary icon file deleted (before: 727 B); binary file not shown.

qt/aqt/data/qt/icons/media-record.svg (new file, 27 lines, 1.3 KiB):

@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
+    xmlns="http://www.w3.org/2000/svg">
+    <g id="Layer-1" transform="translate(0.5,0.5)">
+        <rect x="0" y="0" width="20" height="20" fill="none"/>
+        <g transform="translate(14.8974,6.3648)">
+            <path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
+                fill="black" fill-rule="nonzero"/>
+        </g>
+        <g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
+            <ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
+                fill="none" stroke="black" stroke-width="0.25"/>
+        </g>
+        <g transform="translate(3.1987,14.4958)">
+            <path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
+                fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
+        </g>
+        <g transform="matrix(-1,0,0,1,20.573,18.613)">
+            <rect x="5.387" y="0.601" width="9.799" height="0.185"
+                fill="none" stroke="black" stroke-width="2"/>
+        </g>
+        <g transform="matrix(-1,0,0,1,20.741,13.51)">
+            <rect x="9.899" y="1.163" width="0.943" height="4.164"
+                fill="none" stroke="black" stroke-width="2"/>
+        </g>
+    </g>
+</svg>
@@ -483,7 +483,7 @@ def update_deck_configs() -> bytes:
         update.abort = True
 
     def on_success(changes: OpChanges) -> None:
-        if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
+        if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog):
             window.reject()
 
     def handle_on_main() -> None:

@@ -511,7 +511,7 @@ def set_scheduling_states() -> bytes:
 
 def import_done() -> bytes:
     def update_window_modality() -> None:
-        if window := aqt.mw.app.activeWindow():
+        if window := aqt.mw.app.activeModalWidget():
             from aqt.import_export.import_dialog import ImportDialog
 
             if isinstance(window, ImportDialog):

@@ -529,7 +529,7 @@ def import_request(endpoint: str) -> bytes:
     response.ParseFromString(output)
 
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         on_op_finished(aqt.mw, response, window)
 
     aqt.mw.taskman.run_on_main(handle_on_main)

@@ -569,7 +569,7 @@ def change_notetype() -> bytes:
     data = request.data
 
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, ChangeNotetypeDialog):
             window.save(data)
 

@@ -579,7 +579,7 @@ def change_notetype() -> bytes:
 
 def deck_options_require_close() -> bytes:
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, DeckOptionsDialog):
             window.require_close()
 

@@ -591,7 +591,7 @@ def deck_options_require_close() -> bytes:
 
 def deck_options_ready() -> bytes:
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, DeckOptionsDialog):
             window.set_ready()
 
@@ -32,6 +32,7 @@ from aqt._macos_helper import macos_helper
 from aqt.mpv import MPV, MPVBase, MPVCommandError
 from aqt.qt import *
 from aqt.taskman import TaskManager
+from aqt.theme import theme_manager
 from aqt.utils import (
     disable_help_button,
     restoreGeom,

@@ -743,7 +744,8 @@ class RecordDialog(QDialog):
     def _setup_dialog(self) -> None:
         self.setWindowTitle("Anki")
         icon = QLabel()
-        icon.setPixmap(QPixmap("icons:media-record.png"))
+        qicon = theme_manager.icon_from_resources("icons:media-record.svg")
+        icon.setPixmap(qicon.pixmap(60, 60))
         self.label = QLabel("...")
         hbox = QHBoxLayout()
         hbox.addWidget(icon)
@@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
     elif isinstance(err, Interrupted):
         # no message to show
         return
-    show_warning(str(err))
+    show_warning(str(err), parent=mw)
 
 
 def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@@ -115,7 +115,7 @@ class ThemeManager:
         # Workaround for Qt bug. First attempt was percent-escaping the chars,
         # but Qt can't handle that.
         # https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
-        path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
+        path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path)
         return path
 
     def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:
@@ -226,29 +226,45 @@ def ask_user_dialog(
     )
 
 
-def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
+def show_info(
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
+) -> MessageBox:
     "Show a small info window with an OK button."
     if "icon" not in kwargs:
         kwargs["icon"] = QMessageBox.Icon.Information
     return MessageBox(
         text,
         callback=(lambda _: callback()) if callback is not None else None,
+        parent=parent,
         **kwargs,
     )
 
 
 def show_warning(
-    text: str, callback: Callable | None = None, **kwargs: Any
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
 ) -> MessageBox:
     "Show a small warning window with an OK button."
-    return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
+    return show_info(
+        text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
+    )
 
 
 def show_critical(
-    text: str, callback: Callable | None = None, **kwargs: Any
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
 ) -> MessageBox:
     "Show a small critical error window with an OK button."
-    return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
+    return show_info(
+        text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
+    )
 
 
 def showWarning(
@@ -309,6 +309,13 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
         command.env("UV_NO_CACHE", "1");
     }
 
+    // Add mirror environment variable if enabled
+    if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
+        command
+            .env("UV_PYTHON_INSTALL_MIRROR", &python_mirror)
+            .env("UV_DEFAULT_INDEX", &pypi_mirror);
+    }
+
     match command.ensure_success() {
         Ok(_) => {
             // Sync succeeded

@@ -673,6 +680,12 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
     cmd.arg(&versions_script);
 
+    // Add mirror environment variable if enabled
+    if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
+        cmd.env("UV_PYTHON_INSTALL_MIRROR", &python_mirror)
+            .env("UV_DEFAULT_INDEX", &pypi_mirror);
+    }
+
     let output = match cmd.utf8_output() {
         Ok(output) => output,
         Err(e) => {

@@ -725,15 +738,7 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
             &format!("anki-release=={version}\",\n    \"anki=={version}\",\n    \"aqt=={version}"),
         ),
     };
-
-    // Add mirror configuration if enabled
-    let final_content = if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
-        format!("{updated_content}\n\n[[tool.uv.index]]\nname = \"mirror\"\nurl = \"{pypi_mirror}\"\ndefault = true\n\n[tool.uv]\npython-install-mirror = \"{python_mirror}\"\n")
-    } else {
-        updated_content
-    };
-
-    write_file(&state.user_pyproject_path, &final_content)?;
+    write_file(&state.user_pyproject_path, &updated_content)?;
 
     // Update .python-version based on version kind
     match version_kind {
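All three launcher call sites now pass the mirrors through environment variables that uv honors (UV_PYTHON_INSTALL_MIRROR for interpreter downloads, UV_DEFAULT_INDEX for the package index) instead of appending [[tool.uv.index]] / [tool.uv] sections to the user's pyproject.toml. A runnable sketch of the pattern, with placeholder mirror URLs (the URLs and the hard-coded Option are assumptions for illustration):

    use std::process::Command;

    fn main() -> std::io::Result<()> {
        // Stand-in for get_mirror_urls(): (python mirror, pypi index)
        let mirrors: Option<(String, String)> = Some((
            "https://example.invalid/python-builds".into(), // hypothetical
            "https://example.invalid/pypi/simple".into(),   // hypothetical
        ));

        let mut cmd = Command::new("uv");
        cmd.arg("sync");
        // Environment variables keep the user's pyproject.toml untouched,
        // unlike the previous approach of rewriting the file.
        if let Some((python_mirror, pypi_mirror)) = mirrors {
            cmd.env("UV_PYTHON_INSTALL_MIRROR", python_mirror)
                .env("UV_DEFAULT_INDEX", pypi_mirror);
        }
        let status = cmd.status()?;
        println!("uv exited with {status}");
        Ok(())
    }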
@@ -22,6 +22,7 @@ inflections.workspace = true
 anki_io.workspace = true
 anyhow.workspace = true
 itertools.workspace = true
+regex.workspace = true
 
 [dependencies]
 fluent.workspace = true
@@ -4,6 +4,5 @@
 // Include auto-generated content
 
 #![allow(clippy::all)]
-#![allow(text_direction_codepoint_in_literal)]
 
 include!(concat!(env!("OUT_DIR"), "/strings.rs"));
@@ -195,12 +195,30 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
     .unwrap();
 
     for (module, contents) in modules {
-        writeln!(buf, r###"    "{module}" => r##"{contents}"##,"###).unwrap();
+        let escaped_contents = escape_unicode_control_chars(contents);
+        writeln!(
+            buf,
+            r###"    "{module}" => r##"{escaped_contents}"##,"###
+        )
+        .unwrap();
     }
 
     buf.push_str("};\n");
 }
 
+fn escape_unicode_control_chars(input: &str) -> String {
+    use regex::Regex;
+
+    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
+    let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
+
+    re.replace_all(input, |caps: &regex::Captures| {
+        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
+        format!("\\u{{{:04x}}}", c as u32)
+    })
+    .into_owned()
+}
+
 fn lang_constant_name(lang: &str) -> String {
     lang.to_ascii_uppercase().replace('-', "_")
 }
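Escaping the bidirectional control characters at generation time is what lets the earlier hunk drop `#![allow(text_direction_codepoint_in_literal)]`: the generated string literals now contain visible \u{...} escapes instead of raw bidi codepoints. A self-contained sketch of the same substitution (regex crate assumed; same character ranges as above):

    use regex::Regex;

    fn escape_bidi(input: &str) -> String {
        // U+202A..U+202E and U+2066..U+2069 are the bidi embedding,
        // override, and isolate controls that trip rustc's lint.
        let re = Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap();
        re.replace_all(input, |caps: &regex::Captures| {
            let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
            format!("\\u{{{:04x}}}", c as u32)
        })
        .into_owned()
    }

    fn main() {
        // U+202B RIGHT-TO-LEFT EMBEDDING wrapped around Hebrew text
        let s = "\u{202b}עברית\u{202c}";
        assert_eq!(escape_bidi(s), "\\u{202b}עברית\\u{202c}");
    }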
@@ -42,14 +42,14 @@ enum CheckableUrl {
 }
 
 impl CheckableUrl {
-    fn url(&self) -> Cow<str> {
+    fn url(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_to_link(page).into(),
             Self::String(s) => s.into(),
         }
     }
 
-    fn anchor(&self) -> Cow<str> {
+    fn anchor(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_link_suffix(page).into(),
             Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
 }
 
 impl Backend {
-    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_some()

@@ -102,7 +102,7 @@ impl Backend {
         .ok_or(AnkiError::CollectionNotOpen)
     }
 
-    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_none()
@@ -34,7 +34,7 @@ pub fn prettify_av_tags<S: Into<String> + AsRef<str>>(txt: S) -> String {
 
 /// Parse `txt` into [CardNodes] and return the result,
 /// or [None] if it only contains text nodes.
-fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
+fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
     let nodes = CardNodes::parse(txt);
     (!nodes.text_only).then_some(nodes)
 }

@@ -103,13 +103,13 @@ fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
     move |s| alt((is_not(arr), success(""))).parse(s)
 }
 
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node<'_>> {
     alt((sound_node, tag_node, text_node)).parse(s)
 }
 
 /// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
 /// video file.
-fn sound_node(s: &str) -> IResult<Node> {
+fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
     map(
         delimited(tag("[sound:"), is_not("]"), tag("]")),
         Node::SoundOrVideo,

@@ -117,7 +117,7 @@ fn sound_node(s: &str) -> IResult<Node> {
     .parse(s)
 }
 
-fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
+fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
     // first char could be '[', but wasn't part of a node, so skip (eof ends parse)
     let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
     Ok(match after.find('[') {

@@ -127,9 +127,9 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
 }
 
 /// An Anki tag `[anki:tag...]...[/anki:tag]`.
-fn tag_node(s: &str) -> IResult<Node> {
+fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
     /// Match the start of an opening tag and return its name.
-    fn name(s: &str) -> IResult<&str> {
+    fn name(s: &str) -> IResult<'_, &str> {
         preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
     }
 

@@ -139,12 +139,12 @@ fn tag_node(s: &str) -> IResult<Node> {
 ) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
     /// List of whitespace-separated `key=val` tuples, where `val` may be
     /// empty.
-    fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
-        fn key(s: &str) -> IResult<&str> {
+    fn options(s: &str) -> IResult<'_, Vec<(&str, &str)>> {
+        fn key(s: &str) -> IResult<'_, &str> {
             is_not("] \t\r\n=").parse(s)
         }
 
-        fn val(s: &str) -> IResult<&str> {
+        fn val(s: &str) -> IResult<'_, &str> {
             alt((
                 delimited(tag("\""), is_not0("\""), tag("\"")),
                 is_not0("] \t\r\n\""),

@@ -197,7 +197,7 @@ fn tag_node(s: &str) -> IResult<Node> {
     .parse(s)
 }
 
-fn text_node(s: &str) -> IResult<Node> {
+fn text_node(s: &str) -> IResult<'_, Node<'_>> {
     map(take_till_potential_tag_start, Node::Text).parse(s)
 }
 
@@ -54,8 +54,8 @@ enum Token<'a> {
 }
 
 /// Tokenize string
-fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
-    fn open_cloze(text: &str) -> IResult<&str, Token> {
+fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
+    fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
         // opening brackets and 'c'
         let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
         // following number

@@ -75,12 +75,12 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
         Ok((text, Token::OpenCloze(digits)))
     }
 
-    fn close_cloze(text: &str) -> IResult<&str, Token> {
+    fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
         map(tag("}}"), |_| Token::CloseCloze).parse(text)
     }
 
     /// Match a run of text until an open/close marker is encountered.
-    fn normal_text(text: &str) -> IResult<&str, Token> {
+    fn normal_text(text: &str) -> IResult<&str, Token<'_>> {
         if text.is_empty() {
             return Err(nom::Err::Error(nom::error::make_error(
                 text,

@@ -132,7 +132,7 @@ impl ExtractedCloze<'_> {
         self.hint.unwrap_or("...")
     }
 
-    fn clozed_text(&self) -> Cow<str> {
+    fn clozed_text(&self) -> Cow<'_, str> {
         // happy efficient path?
         if self.nodes.len() == 1 {
             if let TextOrCloze::Text(text) = self.nodes.last().unwrap() {

@@ -353,7 +353,7 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
         .collect()
 }
 
-pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
     let mut buf = String::new();
     let mut active_cloze_found_in_text = false;
     for node in &parse_text_with_clozes(text) {

@@ -376,7 +376,7 @@ pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str>
     }
 }
 
-pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
     let mut output = Vec::new();
     for node in &parse_text_with_clozes(text) {
         reveal_cloze_text_in_nodes(node, cloze_ord, question, &mut output);

@@ -384,7 +384,7 @@ pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow
     output.join(", ").into()
 }
 
-pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<str> {
+pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<'_, str> {
     let mut output = Vec::new();
     for node in &parse_text_with_clozes(text) {
         reveal_cloze_text_in_nodes(node, cloze_ord, false, &mut output);

@@ -460,7 +460,7 @@ pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
     CLOZE.replace_all(text, "$1")
 }
 
-fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
+fn strip_html_inside_mathjax(text: &str) -> Cow<'_, str> {
     MATHJAX.replace_all(text, |caps: &Captures| -> String {
         format!(
             "{}{}{}",
@@ -115,7 +115,7 @@ impl crate::services::DeckConfigService for Collection {
             .storage
             .get_revlog_entries_for_searched_cards_in_card_order()?;
 
-        let config = guard.col.get_optimal_retention_parameters(revlogs)?;
+        let mut config = guard.col.get_optimal_retention_parameters(revlogs)?;
         let cards = guard
             .col
             .storage

@@ -125,6 +125,8 @@ impl crate::services::DeckConfigService for Collection {
             .filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
             .collect::<Vec<fsrs::Card>>();
 
+        config.deck_size = guard.cards;
+
         let costs = (70u32..=99u32)
             .into_par_iter()
            .map(|dr| {
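The retention simulation previously ran with the config's default deck size; seeding `deck_size` from the number of cards captured by the preceding search (`guard.cards`; field meaning inferred from the hunk) makes the per-retention cost estimates track the user's actual collection. A trivial stand-in sketch of the pattern (the real `SimulatorConfig` comes from the fsrs crate):

    // Local stand-in; the real type is fsrs::SimulatorConfig.
    #[derive(Default)]
    struct SimulatorConfig {
        deck_size: usize, // how many cards the simulation assumes
    }

    fn main() {
        let searched_cards = 1234; // e.g. guard.cards in the hunk above
        let mut config = SimulatorConfig::default();
        // Override the default so simulated workload reflects the real deck.
        config.deck_size = searched_cards;
        assert_eq!(config.deck_size, 1234);
    }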
@@ -216,9 +216,6 @@ impl Collection {
         for deck in self.storage.get_all_decks()? {
             if let Ok(normal) = deck.normal() {
                 let deck_id = deck.id;
-                if let Some(desired_retention) = normal.desired_retention {
-                    deck_desired_retention.insert(deck_id, desired_retention);
-                }
                 // previous order & params
                 let previous_config_id = DeckConfigId(normal.config_id);
                 let previous_config = configs_before_update.get(&previous_config_id);

@@ -226,21 +223,23 @@ impl Collection {
                     .map(|c| c.inner.new_card_insert_order())
                     .unwrap_or_default();
                 let previous_params = previous_config.map(|c| c.fsrs_params());
-                let previous_retention = previous_config.map(|c| c.inner.desired_retention);
+                let previous_preset_dr = previous_config.map(|c| c.inner.desired_retention);
+                let previous_deck_dr = normal.desired_retention;
+                let previous_dr = previous_deck_dr.or(previous_preset_dr);
                 let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
 
                 // if a selected (sub)deck, or its old config was removed, update deck to point
                 // to new config
-                let current_config_id = if selected_deck_ids.contains(&deck.id)
+                let (current_config_id, current_deck_dr) = if selected_deck_ids.contains(&deck.id)
                     || !configs_after_update.contains_key(&previous_config_id)
                 {
                     let mut updated = deck.clone();
                     updated.normal_mut()?.config_id = selected_config.id.0;
                     update_deck_limits(updated.normal_mut()?, &req.limits, today);
                     self.update_deck_inner(&mut updated, deck, usn)?;
-                    selected_config.id
+                    (selected_config.id, updated.normal()?.desired_retention)
                 } else {
-                    previous_config_id
+                    (previous_config_id, previous_deck_dr)
                 };
 
                 // if new order differs, deck needs re-sorting

@@ -254,11 +253,12 @@ impl Collection {
 
                 // if params differ, memory state needs to be recomputed
                 let current_params = current_config.map(|c| c.fsrs_params());
-                let current_retention = current_config.map(|c| c.inner.desired_retention);
+                let current_preset_dr = current_config.map(|c| c.inner.desired_retention);
+                let current_dr = current_deck_dr.or(current_preset_dr);
                 let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
                 if fsrs_toggled
                     || previous_params != current_params
-                    || previous_retention != current_retention
+                    || previous_dr != current_dr
                     || (req.fsrs_reschedule && previous_easy_days != current_easy_days)
                 {
                     decks_needing_memory_recompute

@@ -266,7 +266,9 @@ impl Collection {
                         .or_default()
                         .push(deck_id);
                 }
+                if let Some(desired_retention) = current_deck_dr {
+                    deck_desired_retention.insert(deck_id, desired_retention);
+                }
                 self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
             }
         }
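The effective desired retention is now resolved per deck: a deck-level override wins, otherwise the preset's value applies, and the recompute check compares the resolved value on both the before and after sides. A small sketch of the fallback rule (names shortened from the hunk above; the real code keeps both sides as Options and compares them directly):

    fn effective_dr(deck_dr: Option<f32>, preset_dr: f32) -> f32 {
        // Option::or keeps the deck override when present,
        // falling back to the preset's desired retention.
        deck_dr.or(Some(preset_dr)).unwrap()
    }

    fn main() {
        assert_eq!(effective_dr(Some(0.85), 0.90), 0.85); // deck override wins
        assert_eq!(effective_dr(None, 0.90), 0.90);       // preset fallback
    }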
@@ -191,7 +191,7 @@ fn invalid_char_for_deck_component(c: char) -> bool {
     c.is_ascii_control()
 }

-fn normalized_deck_name_component(comp: &str) -> Cow<str> {
+fn normalized_deck_name_component(comp: &str) -> Cow<'_, str> {
     let mut out = normalize_to_nfc(comp);
     if out.contains(invalid_char_for_deck_component) {
         out = out.replace(invalid_char_for_deck_component, "").into();
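Most of the remaining Rust signature changes in this commit are the same mechanical fix: an elided lifetime spelled out as `'_` so the borrow is visible in the return type, which appears to be driven by the stricter lifetime-syntax lints enabled by the 1.89 toolchain bump further down in this diff. A minimal before/after sketch:

```rust
use std::borrow::Cow;

// Before: `fn normalized(comp: &str) -> Cow<str>` compiles, but hides
// that the returned Cow may borrow from `comp`.
// After: `Cow<'_, str>` makes the elided lifetime explicit at a glance.
fn normalized(comp: &str) -> Cow<'_, str> {
    if comp.contains(' ') {
        Cow::Owned(comp.replace(' ', "_")) // had to allocate
    } else {
        Cow::Borrowed(comp) // zero-copy passthrough
    }
}
```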
@@ -231,7 +231,10 @@ fn svg_getter(notetypes: &[Notetype]) -> impl Fn(NotetypeId) -> bool {
 }

 impl Collection {
-    fn gather_notes(&mut self, search: impl TryIntoSearch) -> Result<(Vec<Note>, NoteTableGuard)> {
+    fn gather_notes(
+        &mut self,
+        search: impl TryIntoSearch,
+    ) -> Result<(Vec<Note>, NoteTableGuard<'_>)> {
         let guard = self.search_notes_into_table(search)?;
         guard
             .col
@@ -240,7 +243,7 @@ impl Collection {
             .map(|notes| (notes, guard))
     }

-    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard)> {
+    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard<'_>)> {
         let guard = self.search_cards_of_notes_into_table()?;
         guard
             .col
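The `<'_>` on the guard types carries real information here: the guards borrow the collection mutably and tear down the temporary search table when dropped. A simplified sketch of the pattern (the types are stand-ins, not Anki's real ones):

```rust
struct Collection {
    searched: Vec<i64>,
}

/// Simplified stand-in for Anki's CardTableGuard/NoteTableGuard.
struct TableGuard<'a> {
    col: &'a mut Collection,
}

impl Drop for TableGuard<'_> {
    fn drop(&mut self) {
        // tear down the temporary "searched" table with the guard
        self.col.searched.clear();
    }
}

impl Collection {
    // The elided `'_` ties the returned guard to the &mut self borrow.
    fn search_into_table(&mut self, ids: &[i64]) -> TableGuard<'_> {
        self.searched.extend_from_slice(ids);
        TableGuard { col: self }
    }
}
```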
@@ -664,7 +664,7 @@ mod test {
         self
     }

-    fn import(self, col: &mut Collection) -> NoteContext {
+    fn import(self, col: &mut Collection) -> NoteContext<'_> {
         let mut progress_handler = col.new_progress_handler();
         let media_map = Box::leak(Box::new(self.media_map));
         let mut ctx = NoteContext::new(

@@ -154,7 +154,7 @@ pub(super) fn extract_media_entries(
     }
 }

-pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<str>> {
+pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<'_, str>> {
     if !filename_is_safe(name) {
         Err(AnkiError::ImportError {
             source: ImportError::Corrupt,
@@ -147,7 +147,7 @@ fn rendered_nodes_to_str(nodes: &[RenderedNode]) -> String {
         .join("")
 }

-fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
+fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
     let mut text = strip_redundant_sections(field);
     if !with_html {
         text = text.map_cow(|t| html_to_text_line(t, false));
@@ -155,7 +155,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
     text
 }

-fn strip_redundant_sections(text: &str) -> Cow<str> {
+fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> = LazyLock::new(|| {
         Regex::new(
             r"(?isx)
@@ -169,7 +169,7 @@ fn strip_redundant_sections(text: &str) -> Cow<str> {
     RE.replace_all(text.as_ref(), "")
 }

-fn strip_answer_side_question(text: &str) -> Cow<str> {
+fn strip_answer_side_question(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> =
         LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
     RE.replace_all(text.as_ref(), "")

@@ -251,7 +251,7 @@ impl NoteContext {
         .chain(self.tags(note))
     }

-    fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn notetype_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_notetype.then(|| {
             self.notetypes
                 .get(&note.notetype_id)
@@ -259,7 +259,7 @@ impl NoteContext {
         })
     }

-    fn deck_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn deck_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_deck.then(|| {
             self.deck_ids
                 .get(&note.id)
@@ -268,7 +268,7 @@ impl NoteContext {
         })
     }

-    fn tags(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn tags(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_tags
             .then(|| Cow::from(note.tags.join(" ").into_bytes()))
     }
@@ -511,7 +511,7 @@ impl NoteContext<'_> {
 }

 impl Note {
-    fn first_field_stripped(&self) -> Cow<str> {
+    fn first_field_stripped(&self) -> Cow<'_, str> {
         strip_html_preserving_media_filenames(&self.fields()[0])
     }
 }

@@ -623,7 +623,7 @@ impl ForeignNote {
         .all(|(opt, field)| opt.as_ref().map(|s| s == field).unwrap_or(true))
     }

-    fn first_field_stripped(&self) -> Option<Cow<str>> {
+    fn first_field_stripped(&self) -> Option<Cow<'_, str>> {
         self.fields
             .first()
             .and_then(|s| s.as_ref())
@@ -48,7 +48,7 @@ pub struct ExtractedLatex {
 pub(crate) fn extract_latex_expanding_clozes(
     text: &str,
     svg: bool,
-) -> (Cow<str>, Vec<ExtractedLatex>) {
+) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     if text.contains("{{c") {
         let expanded = expand_clozes_to_reveal_latex(text);
         let (text, extracts) = extract_latex(&expanded, svg);
@@ -60,7 +60,7 @@ pub(crate) fn extract_latex_expanding_clozes(

 /// Extract LaTeX from the provided text.
 /// Expects cloze deletions to already be expanded.
-pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
+pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     let mut extracted = vec![];

     let new_text = LATEX.replace_all(text, |caps: &Captures| {
@@ -84,7 +84,7 @@ pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
     (new_text, extracted)
 }

-fn strip_html_for_latex(html: &str) -> Cow<str> {
+fn strip_html_for_latex(html: &str) -> Cow<'_, str> {
     let mut out: Cow<str> = html.into();
     if let Cow::Owned(o) = LATEX_NEWLINES.replace_all(html, "\n") {
         out = o.into();
@@ -91,7 +91,7 @@ fn nonbreaking_space(char: char) -> bool {
 /// - Any problem characters are removed.
 /// - Windows device names like CON and PRN have '_' appended
 /// - The filename is limited to 120 bytes.
-pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
+pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
     let mut output = Cow::Borrowed(fname);

     if !is_nfc(output.as_ref()) {
@@ -102,7 +102,7 @@ pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
 }

 /// See normalize_filename(). This function expects NFC-normalized input.
-pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
+pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
     if fname.contains(disallowed_char) {
         fname = fname.replace(disallowed_char, "").into()
     }
@@ -137,7 +137,7 @@ pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
 /// but can be accessed as NFC. On these devices, if the filename
 /// is otherwise valid, the filename is returned as NFC.
 #[allow(clippy::collapsible_else_if)]
-pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<str>> {
+pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<'_, str>> {
     if cfg!(target_vendor = "apple") {
         if !is_nfc(fname) {
             let as_nfc = fname.chars().nfc().collect::<String>();
@@ -208,7 +208,7 @@ pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> Stri
 }

 /// If filename is longer than max_bytes, truncate it.
-fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
+fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<'_, str> {
     if fname.len() <= max_bytes {
         return Cow::Borrowed(fname);
     }
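These helpers keep the `Cow` borrowed unless normalization actually changes something, so the common case allocates nothing. A runnable sketch of the NFC step, using the unicode-normalization crate these functions appear to build on:

```rust
use std::borrow::Cow;
use unicode_normalization::{is_nfc, UnicodeNormalization};

/// Return `fname` unchanged when it is already NFC; otherwise allocate.
/// Mirrors the borrow-or-own shape of normalize_filename above.
fn to_nfc(fname: &str) -> Cow<'_, str> {
    if is_nfc(fname) {
        Cow::Borrowed(fname)
    } else {
        Cow::Owned(fname.nfc().collect::<String>())
    }
}
```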
@@ -25,7 +25,7 @@ pub struct RenderCardOutput {

 impl RenderCardOutput {
     /// The question text. This is only valid to call when partial_render=false.
-    pub fn question(&self) -> Cow<str> {
+    pub fn question(&self) -> Cow<'_, str> {
         match self.qnodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),
@@ -33,7 +33,7 @@ impl RenderCardOutput {
     }

     /// The answer text. This is only valid to call when partial_render=false.
-    pub fn answer(&self) -> Cow<str> {
+    pub fn answer(&self) -> Cow<'_, str> {
         match self.anodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),
@@ -97,7 +97,7 @@ fn create_review_priority_fn(

     // Interval-based ordering
     IntervalsAscending => wrap!(|c, _w| c.interval as i32),
-    IntervalsDescending => wrap!(|c, _w| -(c.interval as i32)),
+    IntervalsDescending => wrap!(|c, _w| (c.interval as i32).saturating_neg()),
     // Retrievability-based ordering
     RetrievabilityAscending => {
         wrap!(move |c, w| (c.retrievability(w) * 1000.0) as i32)
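`saturating_neg` guards the one value plain negation cannot handle: an interval large enough that the `u32` to `i32` cast wraps to `i32::MIN`, which `-` would overflow (panicking in debug builds). A quick demonstration:

```rust
fn main() {
    // i32::MIN has no positive counterpart, so plain -x overflows;
    // saturating_neg clamps to i32::MAX instead of panicking/wrapping.
    assert_eq!(i32::MIN.saturating_neg(), i32::MAX);
    // Ordinary values behave exactly like unary minus.
    assert_eq!(42i32.saturating_neg(), -42);
}
```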
@@ -142,10 +142,11 @@ impl Collection {
         // calculate any missing memory state
         for c in &mut cards {
             if is_included_card(c) && c.memory_state.is_none() {
-                let original = c.clone();
-                let new_state = self.compute_memory_state(c.id)?.state;
-                c.memory_state = new_state.map(Into::into);
-                self.update_card_inner(c, original, self.usn()?)?;
+                let fsrs_data = self.compute_memory_state(c.id)?;
+                c.memory_state = fsrs_data.state.map(Into::into);
+                c.desired_retention = Some(fsrs_data.desired_retention);
+                c.decay = Some(fsrs_data.decay);
+                self.storage.update_card(c)?;
             }
         }
         let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
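The rewritten loop persists the whole FSRS bundle (memory state, desired retention, decay) straight to storage instead of diffing against a cloned original. A sketch of the data flow, with hypothetical simplified types inferred from the hunk above, not Anki's real ones:

```rust
// Hypothetical shapes for illustration only.
struct MemoryState { stability: f32, difficulty: f32 }

struct FsrsData {
    state: Option<MemoryState>,
    desired_retention: f32,
    decay: f32,
}

struct Card {
    memory_state: Option<MemoryState>,
    desired_retention: Option<f32>,
    decay: Option<f32>,
}

/// Fill in every FSRS-derived field in one pass, as the hunk now does.
fn fill_missing(card: &mut Card, fsrs_data: FsrsData) {
    if card.memory_state.is_none() {
        card.memory_state = fsrs_data.state;
        card.desired_retention = Some(fsrs_data.desired_retention);
        card.decay = Some(fsrs_data.decay);
    }
}
```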
@@ -293,7 +294,8 @@ impl Collection {
     (
         *result.memorized_cnt_per_day.last().unwrap_or(&0.),
         result.cost_per_day.iter().sum::<f32>(),
-        result.review_cnt_per_day.iter().sum::<usize>() as u32,
+        result.review_cnt_per_day.iter().sum::<usize>() as u32
+            + result.learn_cnt_per_day.iter().sum::<usize>() as u32,
     ),
 ))
 })
@@ -61,28 +61,26 @@ impl QueueBuilder {
     }

     fn gather_new_cards(&mut self, col: &mut Collection) -> Result<()> {
+        let salt = Self::knuth_salt(self.context.timing.days_elapsed);
         match self.context.sort_options.new_gather_priority {
             NewCardGatherPriority::Deck => {
                 self.gather_new_cards_by_deck(col, NewCardSorting::LowestPosition)
             }
-            NewCardGatherPriority::DeckThenRandomNotes => self.gather_new_cards_by_deck(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::DeckThenRandomNotes => {
+                self.gather_new_cards_by_deck(col, NewCardSorting::RandomNotes(salt))
+            }
             NewCardGatherPriority::LowestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::LowestPosition)
             }
             NewCardGatherPriority::HighestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::HighestPosition)
             }
-            NewCardGatherPriority::RandomNotes => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
-            NewCardGatherPriority::RandomCards => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomCards(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::RandomNotes => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomNotes(salt))
+            }
+            NewCardGatherPriority::RandomCards => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomCards(salt))
+            }
         }
     }

@@ -169,4 +167,10 @@ impl QueueBuilder {
             true
         }
     }

+    // Generates a salt for use with fnvhash. Useful to increase randomness
+    // when the base salt is a small integer.
+    fn knuth_salt(base_salt: u32) -> u32 {
+        base_salt.wrapping_mul(2654435761)
+    }
 }
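2654435761 is 0x9E3779B1, the multiplicative-hashing constant from Knuth (roughly 2^32 divided by the golden ratio). Multiplying a small counter such as `days_elapsed` by it scatters consecutive values across the full u32 range before they reach the FNV-based card shuffle, so adjacent days no longer produce correlated orderings. A quick check:

```rust
fn knuth_salt(base_salt: u32) -> u32 {
    base_salt.wrapping_mul(2_654_435_761)
}

fn main() {
    // Consecutive day counters land far apart in u32 space:
    for day in 0..4u32 {
        println!("{day} -> {:#010x}", knuth_salt(day));
    }
    // 0 -> 0x00000000, 1 -> 0x9e3779b1, 2 -> 0x3c6ef362, 3 -> 0xdaa66d13
}
```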
@@ -174,7 +174,7 @@ impl LoadBalancer {
         &self,
         note_id: Option<NoteId>,
         deckconfig_id: DeckConfigId,
-    ) -> LoadBalancerContext {
+    ) -> LoadBalancerContext<'_> {
         LoadBalancerContext {
             load_balancer: self,
             note_id,
@@ -226,7 +226,7 @@ impl Collection {
         &mut self,
         search: impl TryIntoSearch,
         mode: SortMode,
-    ) -> Result<CardTableGuard> {
+    ) -> Result<CardTableGuard<'_>> {
         let top_node = search.try_into_search()?;
         let writer = SqlWriter::new(self, ReturnItemType::Cards);
         let want_order = mode != SortMode::NoOrder;
@@ -299,7 +299,7 @@ impl Collection {
     pub(crate) fn search_notes_into_table(
         &mut self,
         search: impl TryIntoSearch,
-    ) -> Result<NoteTableGuard> {
+    ) -> Result<NoteTableGuard<'_>> {
         let top_node = search.try_into_search()?;
         let writer = SqlWriter::new(self, ReturnItemType::Notes);
         let mode = SortMode::NoOrder;
@@ -320,7 +320,7 @@ impl Collection {

     /// Place the ids of cards with notes in 'search_nids' into 'search_cids'.
     /// Returns number of added cards.
-    pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard> {
+    pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard<'_>> {
         self.storage.setup_searched_cards_table()?;
         let cards = self.storage.search_cards_of_notes_into_table()?;
         Ok(CardTableGuard { cards, col: self })
@@ -158,7 +158,7 @@ pub fn parse(input: &str) -> Result<Vec<Node>> {

 /// Zero or more nodes inside brackets, eg 'one OR two -three'.
 /// Empty vec must be handled by caller.
-fn group_inner(input: &str) -> IResult<Vec<Node>> {
+fn group_inner(input: &str) -> IResult<'_, Vec<Node>> {
     let mut remaining = input;
     let mut nodes = vec![];

@@ -203,16 +203,16 @@ fn group_inner(input: &str) -> IResult<Vec<Node>> {
     Ok((remaining, nodes))
 }

-fn whitespace0(s: &str) -> IResult<Vec<char>> {
+fn whitespace0(s: &str) -> IResult<'_, Vec<char>> {
     many0(one_of(" \u{3000}")).parse(s)
 }

 /// Optional leading space, then a (negated) group or text
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node> {
     preceded(whitespace0, alt((negated_node, group, text))).parse(s)
 }

-fn negated_node(s: &str) -> IResult<Node> {
+fn negated_node(s: &str) -> IResult<'_, Node> {
     map(preceded(char('-'), alt((group, text))), |node| {
         Node::Not(Box::new(node))
     })
@@ -220,7 +220,7 @@ fn negated_node(s: &str) -> IResult<Node> {
 }

 /// One or more nodes surrounded by brackets, eg (one OR two)
-fn group(s: &str) -> IResult<Node> {
+fn group(s: &str) -> IResult<'_, Node> {
     let (opened, _) = char('(')(s)?;
     let (tail, inner) = group_inner(opened)?;
     if let Some(remaining) = tail.strip_prefix(')') {
@@ -235,18 +235,18 @@ fn group(s: &str) -> IResult<Node> {
 }

 /// Either quoted or unquoted text
-fn text(s: &str) -> IResult<Node> {
+fn text(s: &str) -> IResult<'_, Node> {
     alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s)
 }

 /// Quoted text, including the outer double quotes.
-fn quoted_term(s: &str) -> IResult<Node> {
+fn quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, term) = quoted_term_str(s)?;
     Ok((remaining, Node::Search(search_node_for_text(term)?)))
 }

 /// eg deck:"foo bar" - quotes must come after the :
-fn partially_quoted_term(s: &str) -> IResult<Node> {
+fn partially_quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, (key, val)) = separated_pair(
         escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")),
         char(':'),
@@ -260,7 +260,7 @@ fn partially_quoted_term(s: &str) -> IResult<Node> {
 }

 /// Unquoted text, terminated by whitespace or unescaped ", ( or )
-fn unquoted_term(s: &str) -> IResult<Node> {
+fn unquoted_term(s: &str) -> IResult<'_, Node> {
     match escaped(is_not("\"() \u{3000}\\"), '\\', none_of(" \u{3000}"))(s) {
         Ok((tail, term)) => {
             if term.is_empty() {
@@ -297,7 +297,7 @@ fn unquoted_term(s: &str) -> IResult<Node> {
 }

 /// Non-empty string delimited by unescaped double quotes.
-fn quoted_term_str(s: &str) -> IResult<&str> {
+fn quoted_term_str(s: &str) -> IResult<'_, &str> {
     let (opened, _) = char('"')(s)?;
     if let Ok((tail, inner)) =
         escaped::<_, ParseError, _, _>(is_not(r#""\"#), '\\', anychar).parse(opened)
@@ -321,7 +321,7 @@ fn quoted_term_str(s: &str) -> IResult<&str> {

 /// Determine if text is a qualified search, and handle escaped chars.
 /// Expect well-formed input: unempty and no trailing \.
-fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
+fn search_node_for_text(s: &str) -> ParseResult<'_, SearchNode> {
     // leading : is only possible error for well-formed input
     let (tail, head) = verify(escaped(is_not(r":\"), '\\', anychar), |t: &str| {
         !t.is_empty()
@@ -369,7 +369,7 @@ fn search_node_for_text_with_argument<'a>(
     })
 }

-fn parse_tag(s: &str) -> ParseResult<SearchNode> {
+fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(if let Some(re) = s.strip_prefix("re:") {
         SearchNode::Tag {
             tag: unescape_quotes(re),
@@ -383,7 +383,7 @@ fn parse_tag(s: &str) -> ParseResult<SearchNode> {
     })
 }

-fn parse_template(s: &str) -> ParseResult<SearchNode> {
+fn parse_template(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(SearchNode::CardTemplate(match s.parse::<u16>() {
         Ok(n) => TemplateKind::Ordinal(n.max(1) - 1),
         Err(_) => TemplateKind::Name(unescape(s)?),
@@ -391,7 +391,7 @@ fn parse_template(s: &str) -> ParseResult<SearchNode> {
 }

 /// flag:0-7
-fn parse_flag(s: &str) -> ParseResult<SearchNode> {
+fn parse_flag(s: &str) -> ParseResult<'_, SearchNode> {
     if let Ok(flag) = s.parse::<u8>() {
         if flag > 7 {
             Err(parse_failure(s, FailKind::InvalidFlag))
@@ -404,7 +404,7 @@ fn parse_flag(s: &str) -> ParseResult<SearchNode> {
 }

 /// eg resched:3
-fn parse_resched(s: &str) -> ParseResult<SearchNode> {
+fn parse_resched(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "resched:").map(|days| SearchNode::Rated {
         days,
         ease: RatingKind::ManualReschedule,
@@ -412,7 +412,7 @@ fn parse_resched(s: &str) -> ParseResult<SearchNode> {
 }

 /// eg prop:ivl>3, prop:ease!=2.5
-fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
+fn parse_prop(prop_clause: &str) -> ParseResult<'_, SearchNode> {
     let (tail, prop) = alt((
         tag("ivl"),
         tag("due"),
@@ -580,23 +580,23 @@ fn parse_prop_rated<'a>(num: &str, context: &'a str) -> ParseResult<'a, Property
 }

 /// eg added:1
-fn parse_added(s: &str) -> ParseResult<SearchNode> {
+fn parse_added(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "added:").map(|n| SearchNode::AddedInDays(n.max(1)))
 }

 /// eg edited:1
-fn parse_edited(s: &str) -> ParseResult<SearchNode> {
+fn parse_edited(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "edited:").map(|n| SearchNode::EditedInDays(n.max(1)))
 }

 /// eg introduced:1
-fn parse_introduced(s: &str) -> ParseResult<SearchNode> {
+fn parse_introduced(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "introduced:").map(|n| SearchNode::IntroducedInDays(n.max(1)))
 }

 /// eg rated:3 or rated:10:2
 /// second arg must be between 1-4
-fn parse_rated(s: &str) -> ParseResult<SearchNode> {
+fn parse_rated(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ':');
     let days = parse_u32(it.next().unwrap(), "rated:")?.max(1);
     let button = parse_answer_button(it.next(), s)?;
@@ -604,7 +604,7 @@ fn parse_rated(s: &str) -> ParseResult<SearchNode> {
 }

 /// eg is:due
-fn parse_state(s: &str) -> ParseResult<SearchNode> {
+fn parse_state(s: &str) -> ParseResult<'_, SearchNode> {
     use StateKind::*;
     Ok(SearchNode::State(match s {
         "new" => New,
@@ -624,7 +624,7 @@ fn parse_state(s: &str) -> ParseResult<SearchNode> {
     }))
 }

-fn parse_mid(s: &str) -> ParseResult<SearchNode> {
+fn parse_mid(s: &str) -> ParseResult<'_, SearchNode> {
     parse_i64(s, "mid:").map(|n| SearchNode::NotetypeId(n.into()))
 }

@@ -646,7 +646,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
 }

 /// eg dupe:1231,hello
-fn parse_dupe(s: &str) -> ParseResult<SearchNode> {
+fn parse_dupe(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ',');
     let ntid = parse_i64(it.next().unwrap(), s)?;
     if let Some(text) = it.next() {
@@ -700,7 +700,7 @@ fn unescape_quotes_and_backslashes(s: &str) -> String {
 }

 /// Unescape chars with special meaning to the parser.
-fn unescape(txt: &str) -> ParseResult<String> {
+fn unescape(txt: &str) -> ParseResult<'_, String> {
     if let Some(seq) = invalid_escape_sequence(txt) {
         Err(parse_failure(
             txt,
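`IResult` and `ParseResult` in this parser are lifetime-carrying type aliases, so the old `ParseResult<SearchNode>` silently borrowed from the input string; the `'_` makes that borrow visible to callers. A minimal reproduction of the pattern, assuming an alias of roughly this shape (the real ones wrap nom's error types):

```rust
// Assumed shape of the alias; illustrative only.
type ParseResult<'a, T> = Result<T, &'a str>;

// The '_ tells readers the error side can borrow from `s`.
fn parse_flag(s: &str) -> ParseResult<'_, u8> {
    s.parse::<u8>().map_err(|_| s)
}
```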
@@ -403,7 +403,9 @@ impl super::SqliteStorage {
         let last_revlog_info = get_last_revlog_info(&revlog);
         for (card_id, last_revlog_info) in last_revlog_info {
             let card = self.get_card(card_id)?;
-            if let Some(mut card) = card {
+            if last_revlog_info.last_reviewed_at.is_none() {
+                continue;
+            } else if let Some(mut card) = card {
                 if card.ctype != CardType::New && card.last_review_time.is_none() {
                     card.last_review_time = last_revlog_info.last_reviewed_at;
                     self.update_card(&card)?;
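The upgrade loop now bails out early when the revlog carries no usable review timestamp, keeping the backfill path flat. A minimal sketch of the same guard shape, with stand-in types rather than Anki's:

```rust
// Hypothetical minimal types mirroring the guard above.
struct RevlogInfo { last_reviewed_at: Option<i64> }
struct Card { last_review_time: Option<i64> }

fn backfill(cards: &mut [(Card, RevlogInfo)]) {
    for (card, info) in cards {
        // Nothing to backfill without a recorded review time.
        if info.last_reviewed_at.is_none() {
            continue;
        }
        if card.last_review_time.is_none() {
            card.last_review_time = info.last_reviewed_at;
        }
    }
}
```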
@@ -155,7 +155,7 @@ fn invalid_char_for_tag(c: char) -> bool {
     c.is_ascii_control() || is_tag_separator(c)
 }

-fn normalized_tag_name_component(comp: &str) -> Cow<str> {
+fn normalized_tag_name_component(comp: &str) -> Cow<'_, str> {
     let mut out = normalize_to_nfc(comp);
     if out.contains(invalid_char_for_tag) {
         out = out.replace(invalid_char_for_tag, "").into();
@@ -170,7 +170,7 @@ fn normalized_tag_name_component(comp: &str) -> Cow<str> {
     }
 }

-pub(super) fn normalize_tag_name(name: &str) -> Result<Cow<str>> {
+pub(super) fn normalize_tag_name(name: &str) -> Result<Cow<'_, str>> {
     let normalized_name: Cow<str> = if name
         .split("::")
         .any(|comp| matches!(normalized_tag_name_component(comp), Cow::Owned(_)))
@@ -121,7 +121,7 @@ pub enum Token<'a> {
     CloseConditional(&'a str),
 }

-fn comment_token(s: &str) -> nom::IResult<&str, Token> {
+fn comment_token(s: &str) -> nom::IResult<&str, Token<'_>> {
     map(
         delimited(
             tag(COMMENT_START),
@@ -151,7 +151,7 @@ fn tokens(mut template: &str) -> impl Iterator<Item = TemplateResult<Token<'_>>>
 }

 /// classify handle based on leading character
-fn classify_handle(s: &str) -> Token {
+fn classify_handle(s: &str) -> Token<'_> {
     let start = s.trim_start_matches('{').trim();
     if start.len() < 2 {
         return Token::Replacement(start);
@@ -117,7 +117,7 @@ fn captured_sound(caps: &Captures) -> bool {
     caps.get(2).unwrap().as_str().starts_with("sound:")
 }

-fn kana_filter(text: &str) -> Cow<str> {
+fn kana_filter(text: &str) -> Cow<'_, str> {
     FURIGANA
         .replace_all(&text.replace("&nbsp;", " "), |caps: &Captures| {
             if captured_sound(caps) {
@@ -130,7 +130,7 @@ fn kana_filter(text: &str) -> Cow<str> {
         .into()
 }

-fn kanji_filter(text: &str) -> Cow<str> {
+fn kanji_filter(text: &str) -> Cow<'_, str> {
     FURIGANA
         .replace_all(&text.replace("&nbsp;", " "), |caps: &Captures| {
             if captured_sound(caps) {
@@ -143,7 +143,7 @@ fn kanji_filter(text: &str) -> Cow<str> {
         .into()
 }

-fn furigana_filter(text: &str) -> Cow<str> {
+fn furigana_filter(text: &str) -> Cow<'_, str> {
     FURIGANA
         .replace_all(&text.replace("&nbsp;", " "), |caps: &Captures| {
             if captured_sound(caps) {
@@ -215,8 +215,8 @@ pub fn is_html(text: impl AsRef<str>) -> bool {
     HTML.is_match(text.as_ref())
 }

-pub fn html_to_text_line(html: &str, preserve_media_filenames: bool) -> Cow<str> {
-    let (html_stripper, sound_rep): (fn(&str) -> Cow<str>, _) = if preserve_media_filenames {
+pub fn html_to_text_line(html: &str, preserve_media_filenames: bool) -> Cow<'_, str> {
+    let (html_stripper, sound_rep): (fn(&str) -> Cow<'_, str>, _) = if preserve_media_filenames {
         (strip_html_preserving_media_filenames, "$1")
     } else {
         (strip_html, "")
@@ -229,15 +229,15 @@ pub fn html_to_text_line(html: &str, preserve_media_filenames: bool) -> Cow<str>
     .trim()
 }

-pub fn strip_html(html: &str) -> Cow<str> {
+pub fn strip_html(html: &str) -> Cow<'_, str> {
     strip_html_preserving_entities(html).map_cow(decode_entities)
 }

-pub fn strip_html_preserving_entities(html: &str) -> Cow<str> {
+pub fn strip_html_preserving_entities(html: &str) -> Cow<'_, str> {
     HTML.replace_all(html, "")
 }

-pub fn decode_entities(html: &str) -> Cow<str> {
+pub fn decode_entities(html: &str) -> Cow<'_, str> {
     if html.contains('&') {
         match htmlescape::decode_html(html) {
             Ok(text) => text.replace('\u{a0}', " ").into(),
@@ -249,7 +249,7 @@ pub fn decode_entities(html: &str) -> Cow<str> {
     }
 }

-pub(crate) fn newlines_to_spaces(text: &str) -> Cow<str> {
+pub(crate) fn newlines_to_spaces(text: &str) -> Cow<'_, str> {
     if text.contains('\n') {
         text.replace('\n', " ").into()
     } else {
@@ -257,7 +257,7 @@ pub(crate) fn newlines_to_spaces(text: &str) -> Cow<str> {
     }
 }

-pub fn strip_html_for_tts(html: &str) -> Cow<str> {
+pub fn strip_html_for_tts(html: &str) -> Cow<'_, str> {
     HTML_LINEBREAK_TAGS
         .replace_all(html, " ")
         .map_cow(strip_html)
@@ -282,7 +282,7 @@ pub(crate) struct MediaRef<'a> {
     pub fname_decoded: Cow<'a, str>,
 }

-pub(crate) fn extract_media_refs(text: &str) -> Vec<MediaRef> {
+pub(crate) fn extract_media_refs(text: &str) -> Vec<MediaRef<'_>> {
     let mut out = vec![];

     for caps in HTML_MEDIA_TAGS.captures_iter(text) {
@@ -359,11 +359,11 @@ pub(crate) fn extract_underscored_references(text: &str) -> Vec<&str> {
 /// Returns the first matching group as a str. This is intended for regexes
 /// where exactly one group matches, and will panic for matches without matching
 /// groups.
-fn extract_match(caps: Captures) -> &str {
+fn extract_match(caps: Captures<'_>) -> &str {
     caps.iter().skip(1).find_map(|g| g).unwrap().as_str()
 }

-pub fn strip_html_preserving_media_filenames(html: &str) -> Cow<str> {
+pub fn strip_html_preserving_media_filenames(html: &str) -> Cow<'_, str> {
     HTML_MEDIA_TAGS
         .replace_all(html, r" ${1}${2}${3} ")
         .map_cow(strip_html)
@@ -385,7 +385,7 @@ pub(crate) fn sanitize_html_no_images(html: &str) -> String {
     .to_string()
 }

-pub(crate) fn normalize_to_nfc(s: &str) -> Cow<str> {
+pub(crate) fn normalize_to_nfc(s: &str) -> Cow<'_, str> {
     match is_nfc(s) {
         false => s.chars().nfc().collect::<String>().into(),
         true => s.into(),
@@ -429,7 +429,7 @@ static EXTRA_NO_COMBINING_REPLACEMENTS: phf::Map<char, &str> = phf::phf_map! {
 };

 /// Convert provided string to NFKD form and strip combining characters.
-pub(crate) fn without_combining(s: &str) -> Cow<str> {
+pub(crate) fn without_combining(s: &str) -> Cow<'_, str> {
     // if the string is already normalized
     if matches!(is_nfkd_quick(s.chars()), IsNormalized::Yes) {
         // and no combining characters found, return unchanged
@@ -472,7 +472,7 @@ pub(crate) fn is_glob(txt: &str) -> bool {
 }

 /// Convert to a RegEx respecting Anki wildcards.
-pub(crate) fn to_re(txt: &str) -> Cow<str> {
+pub(crate) fn to_re(txt: &str) -> Cow<'_, str> {
     to_custom_re(txt, ".")
 }

@@ -492,7 +492,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
 }

 /// Convert to SQL respecting Anki wildcards.
-pub(crate) fn to_sql(txt: &str) -> Cow<str> {
+pub(crate) fn to_sql(txt: &str) -> Cow<'_, str> {
     // escape sequences and unescaped special characters which need conversion
     static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\[\\*]|[*%]").unwrap());
     RE.replace_all(txt, |caps: &Captures| {
@@ -508,7 +508,7 @@ pub(crate) fn to_sql(txt: &str) -> Cow<str> {
 }

 /// Unescape everything.
-pub(crate) fn to_text(txt: &str) -> Cow<str> {
+pub(crate) fn to_text(txt: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\(.)").unwrap());
     RE.replace_all(txt, "$1")
 }
@@ -561,14 +561,14 @@ const FRAGMENT_QUERY_UNION: &AsciiSet = &CONTROLS
     .add(b'#');

 /// IRI-encode unescaped local paths in HTML fragment.
-pub(crate) fn encode_iri_paths(unescaped_html: &str) -> Cow<str> {
+pub(crate) fn encode_iri_paths(unescaped_html: &str) -> Cow<'_, str> {
     transform_html_paths(unescaped_html, |fname| {
         utf8_percent_encode(fname, FRAGMENT_QUERY_UNION).into()
     })
 }

 /// URI-decode escaped local paths in HTML fragment.
-pub(crate) fn decode_iri_paths(escaped_html: &str) -> Cow<str> {
+pub(crate) fn decode_iri_paths(escaped_html: &str) -> Cow<'_, str> {
     transform_html_paths(escaped_html, |fname| {
         percent_decode_str(fname).decode_utf8_lossy()
     })
@@ -577,9 +577,9 @@ pub(crate) fn decode_iri_paths(escaped_html: &str) -> Cow<str> {
 /// Apply a transform to local filename references in tags like IMG.
 /// Required at display time, as Anki unfortunately stores the references
 /// in unencoded form in the database.
-fn transform_html_paths<F>(html: &str, transform: F) -> Cow<str>
+fn transform_html_paths<F>(html: &str, transform: F) -> Cow<'_, str>
 where
-    F: Fn(&str) -> Cow<str>,
+    F: Fn(&str) -> Cow<'_, str>,
 {
     HTML_MEDIA_TAGS.replace_all(html, |caps: &Captures| {
         let fname = caps
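The encode/decode pair above round-trips media filenames that Anki stores unencoded in the database. With the percent-encoding crate (the API the hunk uses), the two directions look roughly like this; the escape set here is a reduced stand-in for the real FRAGMENT_QUERY_UNION:

```rust
use std::borrow::Cow;
use percent_encoding::{percent_decode_str, utf8_percent_encode, AsciiSet, CONTROLS};

// A reduced version of FRAGMENT_QUERY_UNION from the hunk above.
const ESCAPE: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'#');

fn encode(fname: &str) -> Cow<'_, str> {
    utf8_percent_encode(fname, ESCAPE).into()
}

fn decode(fname: &str) -> Cow<'_, str> {
    percent_decode_str(fname).decode_utf8_lossy()
}

fn main() {
    assert_eq!(encode("my file#1.jpg"), "my%20file%231.jpg");
    assert_eq!(decode("my%20file%231.jpg"), "my file#1.jpg");
}
```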
@@ -49,7 +49,7 @@ pub fn compare_answer(expected: &str, typed: &str, combining: bool) -> String {
 trait DiffTrait {
     fn get_typed(&self) -> &[char];
     fn get_expected(&self) -> &[char];
-    fn get_expected_original(&self) -> Cow<str>;
+    fn get_expected_original(&self) -> Cow<'_, str>;

     fn new(expected: &str, typed: &str) -> Self;

@@ -136,7 +136,7 @@ fn render_tokens(tokens: &[DiffToken]) -> String {

 /// Prefixes a leading mark character with a non-breaking space to prevent
 /// it from joining the previous token.
-fn isolate_leading_mark(text: &str) -> Cow<str> {
+fn isolate_leading_mark(text: &str) -> Cow<'_, str> {
     if text
         .chars()
         .next()
@@ -161,7 +161,7 @@ impl DiffTrait for Diff {
     fn get_expected(&self) -> &[char] {
         &self.expected
     }
-    fn get_expected_original(&self) -> Cow<str> {
+    fn get_expected_original(&self) -> Cow<'_, str> {
         Cow::Owned(self.get_expected().iter().collect::<String>())
     }

@@ -191,7 +191,7 @@ impl DiffTrait for DiffNonCombining {
     fn get_expected(&self) -> &[char] {
         &self.base.expected
     }
-    fn get_expected_original(&self) -> Cow<str> {
+    fn get_expected_original(&self) -> Cow<'_, str> {
         Cow::Borrowed(&self.expected_original)
     }
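The two `DiffTrait` impls show why `get_expected_original` has to return a `Cow` at all: one variant can hand back a cached string without allocating, while the other must assemble one from a char buffer. The pattern in isolation:

```rust
use std::borrow::Cow;

trait ExpectedText {
    fn expected_original(&self) -> Cow<'_, str>;
}

struct Cached { original: String }
impl ExpectedText for Cached {
    fn expected_original(&self) -> Cow<'_, str> {
        Cow::Borrowed(&self.original) // no allocation needed
    }
}

struct Chars { expected: Vec<char> }
impl ExpectedText for Chars {
    fn expected_original(&self) -> Cow<'_, str> {
        Cow::Owned(self.expected.iter().collect()) // must build a String
    }
}
```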
4 run.bat

@@ -9,6 +9,8 @@ set QTWEBENGINE_CHROMIUM_FLAGS=--remote-allow-origins=http://localhost:8080
 set ANKI_API_PORT=40000
 set ANKI_API_HOST=127.0.0.1

+@if not defined PYENV set PYENV=out\pyenv
+
 call tools\ninja pylib qt || exit /b 1
-.\out\pyenv\scripts\python tools\run.py %* || exit /b 1
+%PYENV%\Scripts\python tools\run.py %* || exit /b 1
 popd
@@ -1,3 +1,3 @@
 [toolchain]
 # older versions may fail to compile; newer versions may fail the clippy tests
-channel = "1.88.0"
+channel = "1.89.0"
@@ -12,7 +12,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     export { className as class };

     export let title: string;
-    export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
 </script>

 <div
@@ -25,22 +24,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     style:--container-margin="0"
 >
     <div class="position-relative">
-        {#if onTitleClick}
-            <span
-                on:click={onTitleClick}
-                on:keydown={onTitleClick}
-                role="button"
-                tabindex="0"
-            >
-                <h1>
-                    {title}
-                </h1>
-            </span>
-        {:else}
-            <h1>
-                {title}
-            </h1>
-        {/if}
+        <h1>
+            {title}
+        </h1>
         <div class="help-badge position-absolute" class:rtl>
             <slot name="tooltip" />
         </div>
134
ts/licenses.json
134
ts/licenses.json
|
@ -95,8 +95,8 @@
|
||||||
"repository": "https://github.com/TooTallNate/node-agent-base",
|
"repository": "https://github.com/TooTallNate/node-agent-base",
|
||||||
"publisher": "Nathan Rajlich",
|
"publisher": "Nathan Rajlich",
|
||||||
"email": "nathan@tootallnate.net",
|
"email": "nathan@tootallnate.net",
|
||||||
"path": "node_modules/https-proxy-agent/node_modules/agent-base",
|
"path": "node_modules/http-proxy-agent/node_modules/agent-base",
|
||||||
"licenseFile": "node_modules/https-proxy-agent/node_modules/agent-base/README.md"
|
"licenseFile": "node_modules/http-proxy-agent/node_modules/agent-base/README.md"
|
||||||
},
|
},
|
||||||
"asynckit@0.4.0": {
|
"asynckit@0.4.0": {
|
||||||
"licenses": "MIT",
|
"licenses": "MIT",
|
||||||
|
@ -127,6 +127,14 @@
|
||||||
"path": "node_modules/browser-process-hrtime",
|
"path": "node_modules/browser-process-hrtime",
|
||||||
"licenseFile": "node_modules/browser-process-hrtime/LICENSE"
|
"licenseFile": "node_modules/browser-process-hrtime/LICENSE"
|
||||||
},
|
},
|
||||||
|
"call-bind-apply-helpers@1.0.2": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/call-bind-apply-helpers",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/call-bind-apply-helpers",
|
||||||
|
"licenseFile": "node_modules/call-bind-apply-helpers/LICENSE"
|
||||||
|
},
|
||||||
"codemirror@5.65.18": {
|
"codemirror@5.65.18": {
|
||||||
"licenses": "MIT",
|
"licenses": "MIT",
|
||||||
"repository": "https://github.com/codemirror/CodeMirror",
|
"repository": "https://github.com/codemirror/CodeMirror",
|
||||||
|
@ -436,10 +444,58 @@
|
||||||
"path": "node_modules/domexception",
|
"path": "node_modules/domexception",
|
||||||
"licenseFile": "node_modules/domexception/LICENSE.txt"
|
"licenseFile": "node_modules/domexception/LICENSE.txt"
|
||||||
},
|
},
|
||||||
|
"dunder-proto@1.0.1": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/es-shims/dunder-proto",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/dunder-proto",
|
||||||
|
"licenseFile": "node_modules/dunder-proto/LICENSE"
|
||||||
|
},
|
||||||
"empty-npm-package@1.0.0": {
|
"empty-npm-package@1.0.0": {
|
||||||
"licenses": "ISC",
|
"licenses": "ISC",
|
||||||
"path": "node_modules/canvas"
|
"path": "node_modules/canvas"
|
||||||
},
|
},
|
||||||
|
"es-define-property@1.0.1": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/es-define-property",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-set-tostringtag/node_modules/es-define-property",
|
||||||
|
"licenseFile": "node_modules/es-set-tostringtag/node_modules/es-define-property/LICENSE"
|
||||||
|
},
|
||||||
|
"es-errors@1.3.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/es-errors",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-errors",
|
||||||
|
"licenseFile": "node_modules/es-errors/LICENSE"
|
||||||
|
},
|
||||||
|
"es-object-atoms@1.0.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/es-object-atoms",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-object-atoms",
|
||||||
|
"licenseFile": "node_modules/es-object-atoms/LICENSE"
|
||||||
|
},
|
||||||
|
"es-object-atoms@1.1.1": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/es-object-atoms",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-set-tostringtag/node_modules/es-object-atoms",
|
||||||
|
"licenseFile": "node_modules/es-set-tostringtag/node_modules/es-object-atoms/LICENSE"
|
||||||
|
},
|
||||||
|
"es-set-tostringtag@2.1.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/es-shims/es-set-tostringtag",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-set-tostringtag",
|
||||||
|
"licenseFile": "node_modules/es-set-tostringtag/LICENSE"
|
||||||
|
},
|
||||||
"escodegen@2.1.0": {
|
"escodegen@2.1.0": {
|
||||||
"licenses": "BSD-2-Clause",
|
"licenses": "BSD-2-Clause",
|
||||||
"repository": "https://github.com/estools/escodegen",
|
"repository": "https://github.com/estools/escodegen",
|
||||||
|
@ -474,7 +530,7 @@
|
||||||
"path": "node_modules/fabric",
|
"path": "node_modules/fabric",
|
||||||
"licenseFile": "node_modules/fabric/LICENSE"
|
"licenseFile": "node_modules/fabric/LICENSE"
|
||||||
},
|
},
|
||||||
"form-data@4.0.1": {
|
"form-data@4.0.4": {
|
||||||
"licenses": "MIT",
|
"licenses": "MIT",
|
||||||
"repository": "https://github.com/form-data/form-data",
|
"repository": "https://github.com/form-data/form-data",
|
||||||
"publisher": "Felix Geisendörfer",
|
"publisher": "Felix Geisendörfer",
|
||||||
|
@ -482,6 +538,38 @@
|
||||||
"path": "node_modules/form-data",
|
"path": "node_modules/form-data",
|
||||||
"licenseFile": "node_modules/form-data/License"
|
"licenseFile": "node_modules/form-data/License"
|
||||||
},
|
},
|
||||||
|
"function-bind@1.1.2": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/Raynos/function-bind",
|
||||||
|
"publisher": "Raynos",
|
||||||
|
"email": "raynos2@gmail.com",
|
||||||
|
"path": "node_modules/function-bind",
|
||||||
|
"licenseFile": "node_modules/function-bind/LICENSE"
|
||||||
|
},
|
||||||
|
"get-intrinsic@1.3.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/get-intrinsic",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-set-tostringtag/node_modules/get-intrinsic",
|
||||||
|
"licenseFile": "node_modules/es-set-tostringtag/node_modules/get-intrinsic/LICENSE"
|
||||||
|
},
|
||||||
|
"get-proto@1.0.1": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/get-proto",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/get-proto",
|
||||||
|
"licenseFile": "node_modules/get-proto/LICENSE"
|
||||||
|
},
|
||||||
|
"gopd@1.2.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/ljharb/gopd",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/dunder-proto/node_modules/gopd",
|
||||||
|
"licenseFile": "node_modules/dunder-proto/node_modules/gopd/LICENSE"
|
||||||
|
},
|
||||||
"hammerjs@2.0.8": {
|
"hammerjs@2.0.8": {
|
||||||
"licenses": "MIT",
|
"licenses": "MIT",
|
||||||
"repository": "https://github.com/hammerjs/hammer.js",
|
"repository": "https://github.com/hammerjs/hammer.js",
|
||||||
|
@ -490,6 +578,38 @@
|
||||||
"path": "node_modules/hammerjs",
|
"path": "node_modules/hammerjs",
|
||||||
"licenseFile": "node_modules/hammerjs/LICENSE.md"
|
"licenseFile": "node_modules/hammerjs/LICENSE.md"
|
||||||
},
|
},
|
||||||
|
"has-symbols@1.0.3": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/inspect-js/has-symbols",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/has-symbols",
|
||||||
|
"licenseFile": "node_modules/has-symbols/LICENSE"
|
||||||
|
},
|
||||||
|
"has-symbols@1.1.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/inspect-js/has-symbols",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/es-set-tostringtag/node_modules/has-symbols",
|
||||||
|
"licenseFile": "node_modules/es-set-tostringtag/node_modules/has-symbols/LICENSE"
|
||||||
|
},
|
||||||
|
"has-tostringtag@1.0.2": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/inspect-js/has-tostringtag",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/has-tostringtag",
|
||||||
|
"licenseFile": "node_modules/has-tostringtag/LICENSE"
|
||||||
|
},
|
||||||
|
"hasown@2.0.2": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/inspect-js/hasOwn",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/hasown",
|
||||||
|
"licenseFile": "node_modules/hasown/LICENSE"
|
||||||
|
},
|
||||||
"html-encoding-sniffer@3.0.0": {
|
"html-encoding-sniffer@3.0.0": {
|
||||||
"licenses": "MIT",
|
"licenses": "MIT",
|
||||||
"repository": "https://github.com/jsdom/html-encoding-sniffer",
|
"repository": "https://github.com/jsdom/html-encoding-sniffer",
|
||||||
|
@ -587,6 +707,14 @@
|
||||||
"path": "node_modules/marked",
|
"path": "node_modules/marked",
|
||||||
"licenseFile": "node_modules/marked/LICENSE.md"
|
"licenseFile": "node_modules/marked/LICENSE.md"
|
||||||
},
|
},
|
||||||
|
"math-intrinsics@1.1.0": {
|
||||||
|
"licenses": "MIT",
|
||||||
|
"repository": "https://github.com/es-shims/math-intrinsics",
|
||||||
|
"publisher": "Jordan Harband",
|
||||||
|
"email": "ljharb@gmail.com",
|
||||||
|
"path": "node_modules/math-intrinsics",
|
||||||
|
"licenseFile": "node_modules/math-intrinsics/LICENSE"
|
||||||
|
},
|
||||||
"mathjax@3.2.2": {
|
"mathjax@3.2.2": {
|
||||||
"licenses": "Apache-2.0",
|
"licenses": "Apache-2.0",
|
||||||
"repository": "https://github.com/mathjax/MathJax",
|
"repository": "https://github.com/mathjax/MathJax",
|
||||||
|
|
|
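Every entry added above follows the same shape: a `name@version` key mapping to license metadata. A minimal TypeScript sketch that reads a file of this shape and flags entries missing a `licenseFile` — the file name and the script itself are illustrative assumptions, not part of this commit:

import { readFileSync } from "node:fs";

// Shape shared by every entry in the license map above.
interface LicenseEntry {
    licenses: string;
    repository?: string;
    publisher?: string;
    email?: string;
    path: string;
    licenseFile?: string;
}

// Hypothetical file name; the real path in the repo may differ.
const entries: Record<string, LicenseEntry> = JSON.parse(
    readFileSync("licenses.json", "utf8"),
);

// Report packages whose license text could not be located.
for (const [pkg, info] of Object.entries(entries)) {
    if (!info.licenseFile) {
        console.warn(`${pkg} (${info.licenses}): no license file recorded`);
    }
}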
@@ -3,9 +3,9 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 <script lang="ts">
-    import { page } from "$app/stores";
+    import { page } from "$app/state";

-    $: message = $page.error!.message;
+    $: message = page.error!.message;
 </script>

 {message}
@@ -3,7 +3,7 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 <script lang="ts">
-    import { page } from "$app/stores";
+    import { page } from "$app/state";

     import CardInfo from "../CardInfo.svelte";
     import type { PageData } from "./$types";
@@ -11,7 +11,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

     export let data: PageData;

-    const showRevlog = $page.url.searchParams.get("revlog") !== "0";
+    const showRevlog = page.url.searchParams.get("revlog") !== "0";

     globalThis.anki ||= {};
     globalThis.anki.updateCard = async (card_id: string): Promise<void> => {
@@ -3,7 +3,7 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 <script lang="ts">
-    import { page } from "$app/stores";
+    import { page } from "$app/state";

     import CardInfo from "../../CardInfo.svelte";
     import type { PageData } from "./$types";
@@ -11,8 +11,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

     export let data: PageData;

-    const showRevlog = $page.url.searchParams.get("revlog") !== "0";
-    const showCurve = $page.url.searchParams.get("curve") !== "0";
+    const showRevlog = page.url.searchParams.get("revlog") !== "0";
+    const showCurve = page.url.searchParams.get("curve") !== "0";

     globalThis.anki ||= {};
     globalThis.anki.updateCardInfos = async (card_id: string): Promise<void> => {
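The three Svelte page diffs above share one migration: `page` now comes from SvelteKit's `$app/state`, where it is plain reactive state rather than a store, so reads drop the `$` store-subscription prefix. A minimal sketch of the before/after usage — the component and its `compact` query parameter are hypothetical, not from this commit:

<script lang="ts">
    // Old store API (what these files moved away from):
    //   import { page } from "$app/stores";
    //   const compact = $page.url.searchParams.get("compact") === "1";
    // New state API: `page` is reactive state, read without the `$` prefix.
    import { page } from "$app/state";

    // Hypothetical query parameter, for illustration only.
    const compact = page.url.searchParams.get("compact") === "1";
</script>

{#if compact}
    <p>Compact layout</p>
{/if}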
@@ -136,12 +136,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

     :global(.container-columns) {
         display: grid;
-        gap: 20px;
+        gap: 0px;
     }

     @include bp.with-breakpoint("lg") {
         :global(.container-columns) {
             grid-template-columns: repeat(2, 1fr);
+            gap: 20px;
         }
     }
 }
@@ -59,11 +59,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
             sched: HelpItemScheduler.FSRS,
             global: true,
         },
-        computeOptimalRetention: {
-            title: tr.deckConfigComputeOptimalRetention(),
-            help: tr.deckConfigComputeOptimalRetentionTooltip4(),
-            sched: HelpItemScheduler.FSRS,
-        },
         healthCheck: {
             title: tr.deckConfigHealthCheck(),
             help:
@@ -8,7 +8,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     // When title is null (default), the graph is inlined, not having TitledContainer wrapper.
     export let title: string | null = null;
     export let subtitle: string | null = null;
-    export let onTitleClick: ((_e: MouseEvent | KeyboardEvent) => void) | null = null;
 </script>

 {#if title == null}
@@ -19,8 +18,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         <slot />
     </div>
 {:else}
-    <TitledContainer class="d-flex flex-column" {title} {onTitleClick}>
-        <slot slot="tooltip" name="tooltip"></slot>
+    <TitledContainer class="d-flex flex-column" {title}>
+        <slot name="tooltip" slot="tooltip"></slot>
         <div class="graph d-flex flex-grow-1 flex-column justify-content-center">
             {#if subtitle}
                 <div class="subtitle">{subtitle}</div>
@@ -57,22 +57,29 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

     const title = tr.statisticsTrueRetentionTitle();
     const subtitle = tr.statisticsTrueRetentionSubtitle();
-    const onTitleClick = () => {
+    const onHelpClick = () => {
         openHelpModal(Object.keys(retentionHelp).indexOf("trueRetention"));
     };
 </script>

-<Graph {title} {subtitle} {onTitleClick}>
+<Graph {title} {subtitle}>
+    <div
+        slot="tooltip"
+        onclick={onHelpClick}
+        onkeydown={onHelpClick}
+        role="button"
+        tabindex="-1"
+    >
     <HelpModal
         title={tr.statisticsTrueRetentionTitle()}
         url={HelpPage.DeckOptions.fsrs}
-        slot="tooltip"
         {helpSections}
         on:mount={(e) => {
             modal = e.detail.modal;
             carousel = e.detail.carousel;
         }}
     />
+    </div>
     <InputBox>
         <label>
             <input type="radio" bind:group={mode} value={DisplayMode.Young} />
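The Graph/TrueRetention changes above move help activation off the container title and onto a wrapper around the tooltip slot, with both pointer and keyboard handlers. A minimal hedged sketch of that pattern using the same Svelte 5 event-attribute syntax as the diff — the component and `openHelp` name are hypothetical:

<script lang="ts">
    // Hypothetical handler; the commit's equivalent is onHelpClick.
    function openHelp(event: MouseEvent | KeyboardEvent): void {
        console.log("open help modal", event.type);
    }
</script>

<!-- role="button" tells assistive technology this div is activatable,
     and the keydown handler keeps it usable without a pointer;
     tabindex="-1" mirrors the diff (focusable by script, skipped in tab order). -->
<div onclick={openHelp} onkeydown={openHelp} role="button" tabindex="-1">
    Help
</div>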
@@ -3007,10 +3007,10 @@ __metadata:
   languageName: node
   linkType: hard

-"devalue@npm:^5.1.0":
-  version: 5.1.1
-  resolution: "devalue@npm:5.1.1"
-  checksum: 10c0/f6717a856fd54216959abd341cb189e47a9b37d72d8419e055ae77567ff4ed0fb683b1ffb6a71067f645adae5991bffabe6468a3e2385937bff49273e71c1f51
+"devalue@npm:^5.3.2":
+  version: 5.3.2
+  resolution: "devalue@npm:5.3.2"
+  checksum: 10c0/2dab403779233224285afe4b30eaded038df10cb89b8f2c1e41dd855a8e6b634aa24175b87f64df665204bb9a6a6e7758d172682719b9c5cf3cef336ff9fa507
   languageName: node
   linkType: hard