From 48f774c7113d8d3730eadbd9648c0ee042c5d486 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Sun, 21 Sep 2025 22:45:11 +1000 Subject: [PATCH 01/18] Add Kazakh to language list --- pylib/anki/lang.py | 4 +++- qt/aqt/about.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py index b639b0416..b09019811 100644 --- a/pylib/anki/lang.py +++ b/pylib/anki/lang.py @@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule TR = anki._fluent.LegacyTranslationEnum FormatTimeSpan = _pb.FormatTimespanRequest - +# When adding new languages here, check lang_to_disk_lang() below langs = sorted( [ ("Afrikaans", "af_ZA"), @@ -38,6 +38,7 @@ langs = sorted( ("Italiano", "it_IT"), ("lo jbobau", "jbo_EN"), ("Lenga d'òc", "oc_FR"), + ("Қазақша", "kk_KZ"), ("Magyar", "hu_HU"), ("Nederlands", "nl_NL"), ("Norsk", "nb_NO"), @@ -104,6 +105,7 @@ compatMap = { "it": "it_IT", "ja": "ja_JP", "jbo": "jbo_EN", + "kk": "kk_KZ", "ko": "ko_KR", "la": "la_LA", "mn": "mn_MN", diff --git a/qt/aqt/about.py b/qt/aqt/about.py index 03e989f2c..95e034037 100644 --- a/qt/aqt/about.py +++ b/qt/aqt/about.py @@ -226,6 +226,7 @@ def show(mw: aqt.AnkiQt) -> QDialog: "Anon_0000", "Bilolbek Normuminov", "Sagiv Marzini", + "Zhanibek Rassululy", ) ) From fb332c4fe11d64be2e0017135c91bf40a3f29432 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Sun, 21 Sep 2025 23:33:15 +1000 Subject: [PATCH 02/18] Add Yiddish to language list --- pylib/anki/lang.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py index b09019811..1b0599a2f 100644 --- a/pylib/anki/lang.py +++ b/pylib/anki/lang.py @@ -65,6 +65,7 @@ langs = sorted( ("Українська мова", "uk_UA"), ("Հայերեն", "hy_AM"), ("עִבְרִית", "he_IL"), + ("ייִדיש", "yi"), ("العربية", "ar_SA"), ("فارسی", "fa_IR"), ("ภาษาไทย", "th_TH"), @@ -128,6 +129,7 @@ compatMap = { "uk": "uk_UA", "uz": "uz_UZ", "vi": "vi_VN", + "yi": "yi", } @@ -235,7 +237,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]: def is_rtl(lang: str) -> bool: - return lang in ("he", "ar", "fa", "ug") + return lang in ("he", "ar", "fa", "ug", "yi") # strip off unicode isolation markers from a translated string From 0d31c6de4a5bd9889d653812bdc43a196c6e1e91 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Tue, 23 Sep 2025 17:59:47 +1000 Subject: [PATCH 03/18] Hard-code more mime types If I had a dollar for all the weird and wonderful ways Windows systems can be broken, I'd be a very rich man. https://forums.ankiweb.net/t/the-gear-icon-in-the-anki-interface-is-not-displaying-properly/66274 --- qt/aqt/mediasrv.py | 41 +++++++++++++++++++++++++++++++++++------ 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index 3a05eec2c..1a94912ee 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -170,13 +170,42 @@ def favicon() -> Response: def _mime_for_path(path: str) -> str: "Mime type for provided path/filename." - if path.endswith(".css"): - # some users may have invalid mime type in the Windows registry - return "text/css" - elif path.endswith(".js") or path.endswith(".mjs"): - return "application/javascript" + + _, ext = os.path.splitext(path) + ext = ext.lower() + + # Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely + # break Anki's UI. So we hard-code the most common extensions. 
+ mime_types = { + ".css": "text/css", + ".js": "application/javascript", + ".mjs": "application/javascript", + ".html": "text/html", + ".htm": "text/html", + ".svg": "image/svg+xml", + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".webp": "image/webp", + ".ico": "image/x-icon", + ".json": "application/json", + ".woff": "font/woff", + ".woff2": "font/woff2", + ".ttf": "font/ttf", + ".otf": "font/otf", + ".mp3": "audio/mpeg", + ".mp4": "video/mp4", + ".webm": "video/webm", + ".ogg": "audio/ogg", + ".pdf": "application/pdf", + ".txt": "text/plain", + } + + if mime := mime_types.get(ext): + return mime else: - # autodetect + # fallback to mimetypes, which may consult the registry mime, _encoding = mimetypes.guess_type(path) return mime or "application/octet-stream" From 99c67d39cbc1a6e170367fe9d21c82e486c70c71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Sep 2025 18:14:05 +1000 Subject: [PATCH 04/18] Bump ammonia from 4.1.1 to 4.1.2 (#4355) Bumps [ammonia](https://github.com/rust-ammonia/ammonia) from 4.1.1 to 4.1.2. - [Release notes](https://github.com/rust-ammonia/ammonia/releases) - [Changelog](https://github.com/rust-ammonia/ammonia/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-ammonia/ammonia/compare/v4.1.1...v4.1.2) --- updated-dependencies: - dependency-name: ammonia dependency-version: 4.1.2 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fe88eb3ab..0abf397e1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -46,9 +46,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "ammonia" -version = "4.1.1" +version = "4.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f" +checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6" dependencies = [ "cssparser", "html5ever 0.35.0", diff --git a/Cargo.toml b/Cargo.toml index 2e9489cb8..db77d41a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" } unicase = "=2.6.0" # any changes could invalidate sqlite indexes # normal -ammonia = "4.1.0" +ammonia = "4.1.2" anyhow = "1.0.98" async-compression = { version = "0.4.24", features = ["zstd", "tokio"] } async-stream = "0.3.6" From 04a0b10a15abc61c409d0073eb856fe75db3f6f2 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Tue, 23 Sep 2025 19:50:19 +1000 Subject: [PATCH 05/18] Launcher now checks Windows version https://forums.ankiweb.net/t/issue-with-installing-anki-launcher-into-custom-folder/66355 --- qt/launcher/src/platform/mod.rs | 3 +++ qt/launcher/src/platform/windows.rs | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs index 6a582f1aa..eec7634f1 100644 --- a/qt/launcher/src/platform/mod.rs +++ b/qt/launcher/src/platform/mod.rs @@ -134,5 +134,8 @@ pub fn ensure_os_supported() -> Result<()> { #[cfg(all(unix, not(target_os = "macos")))] unix::ensure_glibc_supported()?; + #[cfg(target_os = "windows")] + windows::ensure_windows_version_supported()?; + Ok(()) } diff --git a/qt/launcher/src/platform/windows.rs 
b/qt/launcher/src/platform/windows.rs index ebdff6261..d20c9a8b4 100644 --- a/qt/launcher/src/platform/windows.rs +++ b/qt/launcher/src/platform/windows.rs @@ -38,6 +38,26 @@ fn is_windows_10() -> bool { } } +/// Ensures Windows 10 version 1809 or later +pub fn ensure_windows_version_supported() -> Result<()> { + unsafe { + let mut info = OSVERSIONINFOW { + dwOSVersionInfoSize: std::mem::size_of::() as u32, + ..Default::default() + }; + + if RtlGetVersion(&mut info).is_err() { + anyhow::bail!("Failed to get Windows version information"); + } + + if info.dwBuildNumber >= 17763 { + return Ok(()); + } + + anyhow::bail!("Windows 10 version 1809 or later is required.") + } +} + pub fn ensure_terminal_shown() -> Result<()> { unsafe { if !GetConsoleWindow().is_invalid() { From c56e6e55eca2463f70d27a17d953a3218d65978e Mon Sep 17 00:00:00 2001 From: llama Date: Thu, 25 Sep 2025 11:34:27 +0800 Subject: [PATCH 06/18] feat: show saved custom colours as options in fill tool colour picker on mobile (#4348) * add GetCustomColours rpc method * save colours as rgb instead of argb * show saved custom colours as possible options in colour picker this is primarily for mobile clients, as qt currently ignores this * save custom colours on colour picker change (for desktop) --- proto/anki/collection.proto | 5 +++++ qt/aqt/mediasrv.py | 3 ++- rslib/src/collection/service.rs | 11 +++++++++++ rslib/src/config/mod.rs | 1 + ts/routes/image-occlusion/Toolbar.svelte | 18 +++++++++++++++++- 5 files changed, 36 insertions(+), 2 deletions(-) diff --git a/proto/anki/collection.proto b/proto/anki/collection.proto index de0ff08d6..330413613 100644 --- a/proto/anki/collection.proto +++ b/proto/anki/collection.proto @@ -20,6 +20,7 @@ service CollectionService { rpc LatestProgress(generic.Empty) returns (Progress); rpc SetWantsAbort(generic.Empty) returns (generic.Empty); rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges); + rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse); } // Implicitly includes any of the above methods that are not listed in the @@ -163,3 +164,7 @@ message CreateBackupRequest { bool force = 2; bool wait_for_completion = 3; } + +message GetCustomColoursResponse { + repeated string colours = 1; +} diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index 1a94912ee..bedf23e5b 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -630,7 +630,7 @@ def deck_options_ready() -> bytes: def save_custom_colours() -> bytes: colors = [ - QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb) + QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb) for i in range(QColorDialog.customCount()) ] aqt.mw.col.set_config("customColorPickerPalette", colors) @@ -659,6 +659,7 @@ post_handler_list = [ exposed_backend_list = [ # CollectionService "latest_progress", + "get_custom_colours", # DeckService "get_deck_names", # I18nService diff --git a/rslib/src/collection/service.rs b/rslib/src/collection/service.rs index 2050a6897..a37360782 100644 --- a/rslib/src/collection/service.rs +++ b/rslib/src/collection/service.rs @@ -1,8 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::collection::GetCustomColoursResponse; use anki_proto::generic; use crate::collection::Collection; +use crate::config::ConfigKey; use crate::error; use crate::prelude::BoolKey; use crate::prelude::Op; @@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection { }) .map(Into::into) } + + fn 
get_custom_colours( + &mut self, + ) -> error::Result { + let colours = self + .get_config_optional(ConfigKey::CustomColorPickerPalette) + .unwrap_or_default(); + Ok(GetCustomColoursResponse { colours }) + } } diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs index 5ece5b7e1..1e507281a 100644 --- a/rslib/src/config/mod.rs +++ b/rslib/src/config/mod.rs @@ -71,6 +71,7 @@ pub(crate) enum ConfigKey { NextNewCardPosition, #[strum(to_string = "schedVer")] SchedulerVersion, + CustomColorPickerPalette, } #[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)] diff --git a/ts/routes/image-occlusion/Toolbar.svelte b/ts/routes/image-occlusion/Toolbar.svelte index 8775de936..5b627ffdf 100644 --- a/ts/routes/image-occlusion/Toolbar.svelte +++ b/ts/routes/image-occlusion/Toolbar.svelte @@ -55,6 +55,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html onWheelDragX, } from "./tools/tool-zoom"; import { fillMask } from "./tools/tool-fill"; + import { getCustomColours, saveCustomColours } from "@generated/backend"; export let canvas; export let iconSize; @@ -76,6 +77,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html let colourRef: HTMLInputElement | undefined; const colour = writable(SHAPE_MASK_COLOR); + const customColorPickerPalette = writable([]); + + async function loadCustomColours() { + customColorPickerPalette.set( + (await getCustomColours({})).colours.filter( + (hex) => !hex.startsWith("#ffffff"), + ), + ); + } + function onClick(event: MouseEvent) { const upperCanvas = document.querySelector(".upper-canvas"); if (event.target == upperCanvas) { @@ -233,6 +244,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html on(document, "touchstart", onTouchstart), on(document, "mousemove", onMousemoveDocument), ); + loadCustomColours(); }); onDestroy(() => { @@ -241,7 +253,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - + + {#each $customColorPickerPalette as colour} + + {/each} ($colour = e.currentTarget!.value)} + on:change={() => saveCustomColours({})} />
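
Note on the GetCustomColours addition above: the commit message says it is primarily for mobile clients, since Qt currently ignores it. A minimal sketch of how a non-Qt client could read the saved palette, assuming the same generated TypeScript bindings and white-entry filtering that Toolbar.svelte uses in the diff above, rather than any separate API:

    import { getCustomColours } from "@generated/backend";

    // Fetch the colours saved from Qt's colour dialog as "#rrggbb" strings
    // (HexRgb, per the mediasrv.py change above), skipping plain white
    // entries the same way Toolbar.svelte does.
    async function loadSavedPalette(): Promise<string[]> {
        const response = await getCustomColours({});
        return response.colours.filter((hex) => !hex.startsWith("#ffffff"));
    }
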
From 436590f4c2930e0a6d150d2866f83623d9c76d79 Mon Sep 17 00:00:00 2001 From: llama Date: Thu, 25 Sep 2025 12:11:07 +0800 Subject: [PATCH 07/18] feat: add support for `tag:nc:...` searches (#4344) * feat: add support for `tag:nc:...` searches * add test --- pylib/tests/test_find.py | 3 +++ rslib/src/search/parser.rs | 5 +++++ rslib/src/search/sqlwriter.rs | 15 +++++++++++++-- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/pylib/tests/test_find.py b/pylib/tests/test_find.py index 236096572..72e7fdb8a 100644 --- a/pylib/tests/test_find.py +++ b/pylib/tests/test_find.py @@ -32,6 +32,7 @@ def test_find_cards(): note = col.newNote() note["Front"] = "cat" note["Back"] = "sheep" + note.tags.append("conjunção größte") col.addNote(note) catCard = note.cards()[0] m = col.models.current() @@ -68,6 +69,8 @@ def test_find_cards(): col.tags.bulk_remove(col.db.list("select id from notes"), "foo") assert len(col.find_cards("tag:foo")) == 0 assert len(col.find_cards("tag:bar")) == 5 + assert len(col.find_cards("tag:conjuncao tag:groste")) == 0 + assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1 # text searches assert len(col.find_cards("cat")) == 2 assert len(col.find_cards("cat -dog")) == 1 diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index cbdba3d9f..5928bf486 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -392,6 +392,11 @@ fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> { tag: unescape_quotes(re), mode: FieldSearchMode::Regex, } + } else if let Some(nc) = s.strip_prefix("nc:") { + SearchNode::Tag { + tag: unescape(nc)?, + mode: FieldSearchMode::NoCombining, + } } else { SearchNode::Tag { tag: unescape(s)?, diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs index 95249276c..f6237d6fd 100644 --- a/rslib/src/search/sqlwriter.rs +++ b/rslib/src/search/sqlwriter.rs @@ -311,8 +311,19 @@ impl SqlWriter<'_> { } s if s.contains(' ') => write!(self.sql, "false").unwrap(), text => { - write!(self.sql, "n.tags regexp ?").unwrap(); - let re = &to_custom_re(text, r"\S"); + let text = if mode == FieldSearchMode::Normal { + write!(self.sql, "n.tags regexp ?").unwrap(); + Cow::from(text) + } else { + write!( + self.sql, + "coalesce(process_text(n.tags, {}), n.tags) regexp ?", + ProcessTextFlags::NoCombining.bits() + ) + .unwrap(); + without_combining(text) + }; + let re = &to_custom_re(&text, r"\S"); self.args.push(format!("(?i).* {re}(::| ).*")); } } From ee664b8fbb0bcc7c1147f9d5b177291c58267f3c Mon Sep 17 00:00:00 2001 From: Hanni614 Date: Sat, 27 Sep 2025 06:46:11 +0200 Subject: [PATCH 08/18] Stats - Retention rate help box links to Deck Options manual page #4198 (#4329) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * stats: link retention help box to "Desired retention" section * docs: use HTTPS for fsrs and desiredRetention links * Add Hanna Nilsén to CONTRIBUTORS * Apply suggestion from @user1823 Co-authored-by: user1823 <92206575+user1823@users.noreply.github.com> --------- Co-authored-by: Hanna Nilsén Co-authored-by: user1823 <92206575+user1823@users.noreply.github.com> --- CONTRIBUTORS | 1 + ts/lib/components/HelpModal.svelte | 7 ++----- ts/lib/tslib/help-page.ts | 3 ++- ts/routes/graphs/TrueRetention.svelte | 3 ++- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CONTRIBUTORS b/CONTRIBUTORS index 7064c6885..36f535187 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -243,6 +243,7 @@ Lee Doughty <32392044+leedoughty@users.noreply.github.com> 
memchr Max Romanowski Aldlss +Hanna Nilsén ******************** diff --git a/ts/lib/components/HelpModal.svelte b/ts/lib/components/HelpModal.svelte index cf6292537..7ee425950 100644 --- a/ts/lib/components/HelpModal.svelte +++ b/ts/lib/components/HelpModal.svelte @@ -23,6 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html export let title: string; export let url: string; + export let linkLabel: string | undefined = undefined; export let startIndex = 0; export let helpSections: HelpItem[]; export let fsrs = false; @@ -106,11 +107,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{@html renderMarkdown( tr.helpForMoreInfo({ - link: `${title}`, + link: `${linkLabel ?? title}`, }), )}
diff --git a/ts/lib/tslib/help-page.ts b/ts/lib/tslib/help-page.ts index e3f209c6a..e2b2e3da4 100644 --- a/ts/lib/tslib/help-page.ts +++ b/ts/lib/tslib/help-page.ts @@ -27,7 +27,8 @@ export const HelpPage = { limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top", dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits", audio: "https://docs.ankiweb.net/deck-options.html#audio", - fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs", + fsrs: "https://docs.ankiweb.net/deck-options.html#fsrs", + desiredRetention: "https://docs.ankiweb.net/deck-options.html#desired-retention", }, Leeches: { leeches: "https://docs.ankiweb.net/leeches.html#leeches", diff --git a/ts/routes/graphs/TrueRetention.svelte b/ts/routes/graphs/TrueRetention.svelte index 4a9738831..12d17079b 100644 --- a/ts/routes/graphs/TrueRetention.svelte +++ b/ts/routes/graphs/TrueRetention.svelte @@ -72,7 +72,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html > { modal = e.detail.modal; From 3a5a2b1afb5c3273c1eb7da6d092c66dbc222218 Mon Sep 17 00:00:00 2001 From: Jarrett Ye Date: Sat, 27 Sep 2025 13:37:24 +0800 Subject: [PATCH 09/18] Improve performance of `reviews_for_fsrs` function (#4339) * Refactor reviews_for_fsrs function for improved performance Replaced the previous implementation with a more efficient approach using a single loop and pre-allocated vectors. This change reduces the complexity of creating FSRSItems and enhances overall performance, especially for larger datasets. * collapse `if` statement * When not training, only create the final FSRS item --- rslib/src/scheduler/fsrs/params.rs | 67 ++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 23 deletions(-) diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs index d7bb56f5b..1fb1d58b8 100644 --- a/rslib/src/scheduler/fsrs/params.rs +++ b/rslib/src/scheduler/fsrs/params.rs @@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs( })) .collect_vec(); - let skip = if training { 1 } else { 0 }; - // Convert the remaining entries into separate FSRSItems, where each item - // contains all reviews done until then. - let items: Vec<(RevlogId, FSRSItem)> = entries - .iter() - .enumerate() - .skip(skip) - .map(|(outer_idx, entry)| { - let reviews = entries - .iter() - .take(outer_idx + 1) - .enumerate() - .map(|(inner_idx, r)| FSRSReview { - rating: r.button_chosen as u32, - delta_t: delta_ts[inner_idx], - }) - .collect(); - (entry.id, FSRSItem { reviews }) - }) - .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0) - .collect_vec(); + let items = if training { + // Convert the remaining entries into separate FSRSItems, where each item + // contains all reviews done until then. + let mut items = Vec::with_capacity(entries.len()); + let mut current_reviews = Vec::with_capacity(entries.len()); + for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() { + current_reviews.push(FSRSReview { + rating: entry.button_chosen as u32, + delta_t, + }); + if idx >= 1 && delta_t > 0 { + items.push(( + entry.id, + FSRSItem { + reviews: current_reviews.clone(), + }, + )); + } + } + items + } else { + // When not training, we only need the final FSRS item, which represents + // the complete history of the card. This avoids expensive clones in a loop. 
+ let reviews = entries + .iter() + .zip(delta_ts.iter()) + .map(|(entry, &delta_t)| FSRSReview { + rating: entry.button_chosen as u32, + delta_t, + }) + .collect(); + let last_entry = entries.last().unwrap(); + + vec![(last_entry.id, FSRSItem { reviews })] + }; + if items.is_empty() { None } else { @@ -738,7 +753,7 @@ pub(crate) mod tests { ], false, ), - fsrs_items!([review(0)], [review(0), review(1)]) + fsrs_items!([review(0), review(1)]) ); } @@ -809,7 +824,7 @@ pub(crate) mod tests { // R | A X R assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)), - fsrs_items!([review(0)], [review(0), review(2)]) + fsrs_items!([review(0), review(2)]) ); } @@ -828,6 +843,9 @@ pub(crate) mod tests { assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)) .unwrap() + .last() + .unwrap() + .reviews .len(), 2 ); @@ -849,6 +867,9 @@ pub(crate) mod tests { assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)) .unwrap() + .last() + .unwrap() + .reviews .len(), 2 ); From 03f3a005f4821720938ae600ddcc74f8321468fa Mon Sep 17 00:00:00 2001 From: Jarrett Ye Date: Sat, 27 Sep 2025 13:46:50 +0800 Subject: [PATCH 10/18] Fix/first and latest review dates should only consider entries with a rating. (#4360) * Fix/first and latest review dates should only only consider entries with a rating. * Update rslib/src/stats/card.rs Co-authored-by: user1823 <92206575+user1823@users.noreply.github.com> --------- Co-authored-by: user1823 <92206575+user1823@users.noreply.github.com> --- rslib/src/stats/card.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs index 008977fe9..a76edffa2 100644 --- a/rslib/src/stats/card.rs +++ b/rslib/src/stats/card.rs @@ -76,8 +76,15 @@ impl Collection { note_id: card.note_id.into(), deck: deck.human_name(), added: card.id.as_secs().0, - first_review: revlog.first().map(|entry| entry.id.as_secs().0), - latest_review: revlog.last().map(|entry| entry.id.as_secs().0), + first_review: revlog + .iter() + .find(|entry| entry.has_rating()) + .map(|entry| entry.id.as_secs().0), + // last_review_time is not used to ensure cram revlogs are included. 
+ latest_review: revlog + .iter() + .rfind(|entry| entry.has_rating()) + .map(|entry| entry.id.as_secs().0), due_date: self.due_date(&card)?, due_position: self.position(&card), interval: card.interval, From d8aa244a5aa9ed8fe02eb71f15415fdc53057bca Mon Sep 17 00:00:00 2001 From: user1823 <92206575+user1823@users.noreply.github.com> Date: Sat, 27 Sep 2025 11:28:41 +0530 Subject: [PATCH 11/18] Export last_interval to Python (#4365) * Export last_interval to Python * Add last_interval field to StatsRevlogEntry * Implement last_interval_secs function * Update last_interval field type in stats.proto * Update last_interval to use last_interval_secs --- proto/anki/stats.proto | 2 ++ rslib/src/revlog/mod.rs | 9 +++++++++ rslib/src/stats/card.rs | 1 + 3 files changed, 12 insertions(+) diff --git a/proto/anki/stats.proto b/proto/anki/stats.proto index a5639f841..1bd0fe630 100644 --- a/proto/anki/stats.proto +++ b/proto/anki/stats.proto @@ -37,6 +37,8 @@ message CardStatsResponse { uint32 ease = 5; float taken_secs = 6; optional cards.FsrsMemoryState memory_state = 7; + // seconds + uint32 last_interval = 8; } repeated StatsRevlogEntry revlog = 1; int64 card_id = 2; diff --git a/rslib/src/revlog/mod.rs b/rslib/src/revlog/mod.rs index f52698388..fbb9b459a 100644 --- a/rslib/src/revlog/mod.rs +++ b/rslib/src/revlog/mod.rs @@ -85,6 +85,15 @@ impl RevlogEntry { .unwrap() } + pub(crate) fn last_interval_secs(&self) -> u32 { + u32::try_from(if self.last_interval > 0 { + self.last_interval.saturating_mul(86_400) + } else { + self.last_interval.saturating_mul(-1) + }) + .unwrap() + } + /// Returns true if this entry represents a reset operation. /// These entries are created when a card is reset using /// [`Collection::reschedule_cards_as_new`]. diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs index a76edffa2..0dabff5e5 100644 --- a/rslib/src/stats/card.rs +++ b/rslib/src/stats/card.rs @@ -227,6 +227,7 @@ fn stats_revlog_entry( ease: entry.ease_factor, taken_secs: entry.taken_millis as f32 / 1000., memory_state: None, + last_interval: entry.last_interval_secs(), } } From b0665a8ef1abefa3f1dfe2dcc5c3834b4c40f2e0 Mon Sep 17 00:00:00 2001 From: user1823 <92206575+user1823@users.noreply.github.com> Date: Sat, 27 Sep 2025 12:13:34 +0530 Subject: [PATCH 12/18] Fix/Ensure fuzz doesn't go backward during rescheduling (#4364) * Fix/Ensure fuzz doesn't go backward during rescheduling Fixes https://github.com/ankitects/anki/issues/2694 * Fix * Get previous_interval from LastRevlogInfo * Fix * Format * Format * Exclude lapses * Force reconfigure in CI The cached build.ninja may reference files that don't exist in the PR. On a local build this tends to auto-fix itself as the build scripts detect a quick failure and re-run the configure, but CI tends to be too slow. 
https://github.com/ankitects/anki/pull/4364#issuecomment-3338026129 * Rename min/max to make it clear they restrict interval, not fuzz * Wording tweaks/comments for clarity --------- Co-authored-by: Damien Elmes --- .buildkite/linux/entrypoint | 1 + rslib/src/scheduler/fsrs/memory_state.rs | 35 ++++++++++++++++++++++-- rslib/src/scheduler/fsrs/rescheduler.rs | 7 +++-- 3 files changed, 37 insertions(+), 6 deletions(-) diff --git a/.buildkite/linux/entrypoint b/.buildkite/linux/entrypoint index a519cfc3d..4c656aa5c 100755 --- a/.buildkite/linux/entrypoint +++ b/.buildkite/linux/entrypoint @@ -16,6 +16,7 @@ if [ "$CLEAR_RUST" = "1" ]; then rm -rf $BUILD_ROOT/rust fi +rm -f out/build.ninja ./ninja pylib qt check echo "--- Ensure libs importable" diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs index 420ead5a3..303bbfd91 100644 --- a/rslib/src/scheduler/fsrs/memory_state.rs +++ b/rslib/src/scheduler/fsrs/memory_state.rs @@ -136,6 +136,19 @@ impl Collection { let deckconfig_id = deck.config_id().unwrap(); // reschedule it let original_interval = card.interval; + let min_interval = |interval: u32| { + let previous_interval = + last_info.previous_interval.unwrap_or(0); + if interval > previous_interval { + // interval grew; don't allow fuzzed interval to + // be less than previous+1 + previous_interval + 1 + } else { + // interval shrunk; don't restrict negative fuzz + 0 + } + .max(1) + }; let interval = fsrs.next_interval( Some(state.stability), desired_retention, @@ -146,7 +159,7 @@ impl Collection { .and_then(|r| { r.find_interval( interval, - 1, + min_interval(interval as u32), req.max_interval, days_elapsed as u32, deckconfig_id, @@ -157,7 +170,7 @@ impl Collection { with_review_fuzz( card.get_fuzz_factor(true), interval, - 1, + min_interval(interval as u32), req.max_interval, ) }); @@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo { /// reviewed the card and now, so that we can determine an accurate period /// when the card has subsequently been rescheduled to a different day. pub(crate) last_reviewed_at: Option, + /// The interval before the latest review. Used to prevent fuzz from going + /// backwards when rescheduling the card + pub(crate) previous_interval: Option, } /// Return a map of cards to info about last review. 
@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap= 0 && e.button_chosen > 1 { + Some(e.last_interval as u32) + } else { + None + }; } else if e.is_reset() { last_reviewed_at = None; + previous_interval = None; } } - out.insert(card_id, LastRevlogInfo { last_reviewed_at }); + out.insert( + card_id, + LastRevlogInfo { + last_reviewed_at, + previous_interval, + }, + ); }); out } diff --git a/rslib/src/scheduler/fsrs/rescheduler.rs b/rslib/src/scheduler/fsrs/rescheduler.rs index db490b3e4..37c824230 100644 --- a/rslib/src/scheduler/fsrs/rescheduler.rs +++ b/rslib/src/scheduler/fsrs/rescheduler.rs @@ -115,13 +115,14 @@ impl Rescheduler { pub fn find_interval( &self, interval: f32, - minimum: u32, - maximum: u32, + minimum_interval: u32, + maximum_interval: u32, days_elapsed: u32, deckconfig_id: DeckConfigId, fuzz_seed: Option, ) -> Option { - let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum); + let (before_days, after_days) = + constrained_fuzz_bounds(interval, minimum_interval, maximum_interval); // Don't reschedule the card when it's overdue if after_days < days_elapsed { From e0b0d0d19bb349d5241567743083b1b03dbf1984 Mon Sep 17 00:00:00 2001 From: llama Date: Sat, 27 Sep 2025 14:58:46 +0800 Subject: [PATCH 13/18] feat: add i18n to launcher (#4361) * add anki_i18n and locale_config crates to launcher * add launcher.ftl * add tr to state * replace most hardcoded strings with translations * add support for `launcher` rustcfg to trim translations * use marker structs to denote type of translations * move underscores into generated code * Update cargo-license, which may fix the license order issue (dae) --- Cargo.lock | 35 ++++++++ Cargo.toml | 1 + ftl/core/launcher.ftl | 33 +++++++ qt/launcher/Cargo.toml | 2 + qt/launcher/src/main.rs | 104 ++++++++++++++-------- rslib/i18n/build.rs | 13 ++- rslib/i18n/src/generated.rs | 11 ++- rslib/i18n/src/generated_launcher.rs | 15 ++++ rslib/i18n/src/lib.rs | 128 +++++++++++++++------------ rslib/i18n/write_strings.rs | 18 ++-- tools/minilints/src/main.rs | 2 +- 11 files changed, 252 insertions(+), 110 deletions(-) create mode 100644 ftl/core/launcher.ftl create mode 100644 rslib/i18n/src/generated_launcher.rs diff --git a/Cargo.lock b/Cargo.lock index 0abf397e1..e9c74f6ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3555,6 +3555,7 @@ dependencies = [ name = "launcher" version = "1.0.0" dependencies = [ + "anki_i18n", "anki_io", "anki_process", "anyhow", @@ -3563,6 +3564,7 @@ dependencies = [ "embed-resource", "libc", "libc-stdhandle", + "locale_config", "serde_json", "widestring", "windows 0.61.3", @@ -3702,6 +3704,19 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" +[[package]] +name = "locale_config" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d2c35b16f4483f6c26f0e4e9550717a2f6575bcd6f12a53ff0c490a94a6934" +dependencies = [ + "lazy_static", + "objc", + "objc-foundation", + "regex", + "winapi", +] + [[package]] name = "lock_api" version = "0.4.13" @@ -4380,6 +4395,26 @@ dependencies = [ "malloc_buf", ] +[[package]] +name = "objc-foundation" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" +dependencies = [ + "block", + "objc", + "objc_id", +] + +[[package]] +name = "objc_id" +version = "0.1.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" +dependencies = [ + "objc", +] + [[package]] name = "object" version = "0.36.7" diff --git a/Cargo.toml b/Cargo.toml index db77d41a3..fe7f5acd5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -92,6 +92,7 @@ itertools = "0.14.0" junction = "1.2.0" libc = "0.2" libc-stdhandle = "0.1" +locale_config = "0.3.0" maplit = "1.0.2" nom = "8.0.0" num-format = "0.4.4" diff --git a/ftl/core/launcher.ftl b/ftl/core/launcher.ftl new file mode 100644 index 000000000..d07608fbd --- /dev/null +++ b/ftl/core/launcher.ftl @@ -0,0 +1,33 @@ +launcher-title = Anki Launcher +launcher-press-enter-to-start = Press enter to start Anki. +launcher-anki-will-start-shortly = Anki will start shortly. +launcher-you-can-close-this-window = You can close this window. +launcher-updating-anki = Updating Anki... +launcher-latest-anki = Latest Anki (just press Enter) +launcher-choose-a-version = Choose a version +launcher-sync-project-changes = Sync project changes +launcher-keep-existing-version = Keep existing version ({ $current }) +launcher-revert-to-previous = Revert to previous version ({ $prev }) +launcher-allow-betas = Allow betas: { $state } +launcher-on = on +launcher-off = off +launcher-cache-downloads = Cache downloads: { $state } +launcher-download-mirror = Download mirror: { $state } +launcher-uninstall = Uninstall +launcher-invalid-input = Invalid input. Please try again. +launcher-latest-releases = Latest releases: { $releases } +launcher-enter-the-version-you-want = Enter the version you want to install: +launcher-versions-before-cant-be-installed = Versions before 2.1.50 can't be installed. +launcher-invalid-version = Invalid version. +launcher-unable-to-check-for-versions = Unable to check for Anki versions. Please check your internet connection. +launcher-checking-for-updates = Checking for updates... +launcher-uninstall-confirm = Uninstall Anki's program files? (y/n) +launcher-uninstall-cancelled = Uninstall cancelled. +launcher-program-files-removed = Program files removed. +launcher-remove-all-profiles-confirm = Remove all profiles/cards? (y/n) +launcher-user-data-removed = User data removed. +launcher-download-mirror-options = Download mirror options: +launcher-mirror-no-mirror = No mirror +launcher-mirror-china = China +launcher-mirror-disabled = Mirror disabled. +launcher-mirror-china-enabled = China mirror enabled. 
diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml index 7de321a29..5fd1c9900 100644 --- a/qt/launcher/Cargo.toml +++ b/qt/launcher/Cargo.toml @@ -8,11 +8,13 @@ publish = false rust-version.workspace = true [dependencies] +anki_i18n.workspace = true anki_io.workspace = true anki_process.workspace = true anyhow.workspace = true camino.workspace = true dirs.workspace = true +locale_config.workspace = true serde_json.workspace = true [target.'cfg(all(unix, not(target_os = "macos")))'.dependencies] diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs index 8996f9820..e53f34ca3 100644 --- a/qt/launcher/src/main.rs +++ b/qt/launcher/src/main.rs @@ -10,6 +10,7 @@ use std::process::Command; use std::time::SystemTime; use std::time::UNIX_EPOCH; +use anki_i18n::I18n; use anki_io::copy_file; use anki_io::create_dir_all; use anki_io::modified_time; @@ -31,6 +32,7 @@ use crate::platform::respawn_launcher; mod platform; struct State { + tr: I18n, current_version: Option, prerelease_marker: std::path::PathBuf, uv_install_root: std::path::PathBuf, @@ -100,7 +102,14 @@ fn run() -> Result<()> { let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?; + let locale = locale_config::Locale::user_default().to_string(); + let mut state = State { + tr: I18n::new(&[if !locale.is_empty() { + locale + } else { + "en".to_owned() + }]), current_version: None, prerelease_marker: uv_install_root.join("prerelease"), uv_install_root: uv_install_root.clone(), @@ -160,7 +169,7 @@ fn run() -> Result<()> { } print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top - println!("\x1B[1mAnki Launcher\x1B[0m\n"); + println!("\x1B[1m{}\x1B[0m\n", state.tr.launcher_title()); ensure_os_supported()?; @@ -178,15 +187,18 @@ fn run() -> Result<()> { } if cfg!(unix) && !cfg!(target_os = "macos") { - println!("\nPress enter to start Anki."); + println!("\n{}", state.tr.launcher_press_enter_to_start()); let mut input = String::new(); let _ = stdin().read_line(&mut input); } else { // on Windows/macOS, the user needs to close the terminal/console // currently, but ideas on how we can avoid this would be good! 
println!(); - println!("Anki will start shortly."); - println!("\x1B[1mYou can close this window.\x1B[0m\n"); + println!("{}", state.tr.launcher_anki_will_start_shortly()); + println!( + "\x1B[1m{}\x1B[0m\n", + state.tr.launcher_you_can_close_this_window() + ); } // respawn the launcher as a disconnected subprocess for normal startup @@ -258,7 +270,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re // Remove sync marker before attempting sync let _ = remove_file(&state.sync_complete_marker); - println!("Updating Anki...\n"); + println!("{}\n", state.tr.launcher_updating_anki()); let python_version_trimmed = if state.user_python_version_path.exists() { let python_version = read_file(&state.user_python_version_path)?; @@ -440,44 +452,62 @@ fn file_timestamp_secs(path: &std::path::Path) -> i64 { fn get_main_menu_choice(state: &State) -> Result { loop { - println!("1) Latest Anki (press Enter)"); - println!("2) Choose a version"); + println!("1) {}", state.tr.launcher_latest_anki()); + println!("2) {}", state.tr.launcher_choose_a_version()); if let Some(current_version) = &state.current_version { let normalized_current = normalize_version(current_version); if state.pyproject_modified_by_user { - println!("3) Sync project changes"); + println!("3) {}", state.tr.launcher_sync_project_changes()); } else { - println!("3) Keep existing version ({normalized_current})"); + println!( + "3) {}", + state.tr.launcher_keep_existing_version(normalized_current) + ); } } if let Some(prev_version) = &state.previous_version { if state.current_version.as_ref() != Some(prev_version) { let normalized_prev = normalize_version(prev_version); - println!("4) Revert to previous version ({normalized_prev})"); + println!( + "4) {}", + state.tr.launcher_revert_to_previous(normalized_prev) + ); } } println!(); let betas_enabled = state.prerelease_marker.exists(); println!( - "5) Allow betas: {}", - if betas_enabled { "on" } else { "off" } + "5) {}", + state.tr.launcher_allow_betas(if betas_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); let cache_enabled = !state.no_cache_marker.exists(); println!( - "6) Cache downloads: {}", - if cache_enabled { "on" } else { "off" } + "6) {}", + state.tr.launcher_cache_downloads(if cache_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); let mirror_enabled = is_mirror_enabled(state); println!( - "7) Download mirror: {}", - if mirror_enabled { "on" } else { "off" } + "7) {}", + state.tr.launcher_download_mirror(if mirror_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); println!(); - println!("8) Uninstall"); + println!("8) {}", state.tr.launcher_uninstall()); print!("> "); let _ = stdout().flush(); @@ -499,7 +529,7 @@ fn get_main_menu_choice(state: &State) -> Result { if state.current_version.is_some() { MainMenuChoice::KeepExisting } else { - println!("Invalid input. Please try again.\n"); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } } @@ -511,7 +541,7 @@ fn get_main_menu_choice(state: &State) -> Result { } } } - println!("Invalid input. Please try again.\n"); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } "5" => MainMenuChoice::ToggleBetas, @@ -519,7 +549,7 @@ fn get_main_menu_choice(state: &State) -> Result { "7" => MainMenuChoice::DownloadMirror, "8" => MainMenuChoice::Uninstall, _ => { - println!("Invalid input. 
Please try again."); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } }); @@ -534,9 +564,9 @@ fn get_version_kind(state: &State) -> Result> { .map(|v| v.as_str()) .collect::>() .join(", "); - println!("Latest releases: {releases_str}"); + println!("{}", state.tr.launcher_latest_releases(releases_str)); - println!("Enter the version you want to install:"); + println!("{}", state.tr.launcher_enter_the_version_you_want()); print!("> "); let _ = stdout().flush(); @@ -560,11 +590,11 @@ fn get_version_kind(state: &State) -> Result> { Ok(Some(version_kind)) } (None, true) => { - println!("Versions before 2.1.50 can't be installed."); + println!("{}", state.tr.launcher_versions_before_cant_be_installed()); Ok(None) } _ => { - println!("Invalid version.\n"); + println!("{}\n", state.tr.launcher_invalid_version()); Ok(None) } } @@ -700,7 +730,7 @@ fn fetch_versions(state: &State) -> Result> { let output = match cmd.utf8_output() { Ok(output) => output, Err(e) => { - print!("Unable to check for Anki versions. Please check your internet connection.\n\n"); + print!("{}\n\n", state.tr.launcher_unable_to_check_for_versions()); return Err(e.into()); } }; @@ -709,7 +739,7 @@ fn fetch_versions(state: &State) -> Result> { } fn get_releases(state: &State) -> Result { - println!("Checking for updates..."); + println!("{}", state.tr.launcher_checking_for_updates()); let include_prereleases = state.prerelease_marker.exists(); let all_versions = fetch_versions(state)?; let all_versions = filter_and_normalize_versions(all_versions, include_prereleases); @@ -911,7 +941,7 @@ fn get_anki_addons21_path() -> Result { } fn handle_uninstall(state: &State) -> Result { - println!("Uninstall Anki's program files? (y/n)"); + println!("{}", state.tr.launcher_uninstall_confirm()); print!("> "); let _ = stdout().flush(); @@ -920,7 +950,7 @@ fn handle_uninstall(state: &State) -> Result { let input = input.trim().to_lowercase(); if input != "y" { - println!("Uninstall cancelled."); + println!("{}", state.tr.launcher_uninstall_cancelled()); println!(); return Ok(false); } @@ -928,11 +958,11 @@ fn handle_uninstall(state: &State) -> Result { // Remove program files if state.uv_install_root.exists() { anki_io::remove_dir_all(&state.uv_install_root)?; - println!("Program files removed."); + println!("{}", state.tr.launcher_program_files_removed()); } println!(); - println!("Remove all profiles/cards? 
(y/n)"); + println!("{}", state.tr.launcher_remove_all_profiles_confirm()); print!("> "); let _ = stdout().flush(); @@ -942,7 +972,7 @@ fn handle_uninstall(state: &State) -> Result { if input == "y" && state.anki_base_folder.exists() { anki_io::remove_dir_all(&state.anki_base_folder)?; - println!("User data removed."); + println!("{}", state.tr.launcher_user_data_removed()); } println!(); @@ -1036,9 +1066,9 @@ fn get_mirror_urls(state: &State) -> Result> { fn show_mirror_submenu(state: &State) -> Result<()> { loop { - println!("Download mirror options:"); - println!("1) No mirror"); - println!("2) China"); + println!("{}", state.tr.launcher_download_mirror_options()); + println!("1) {}", state.tr.launcher_mirror_no_mirror()); + println!("2) {}", state.tr.launcher_mirror_china()); print!("> "); let _ = stdout().flush(); @@ -1052,14 +1082,14 @@ fn show_mirror_submenu(state: &State) -> Result<()> { if state.mirror_path.exists() { let _ = remove_file(&state.mirror_path); } - println!("Mirror disabled."); + println!("{}", state.tr.launcher_mirror_disabled()); break; } "2" => { // Write China mirror URLs let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/"; write_file(&state.mirror_path, china_mirrors)?; - println!("China mirror enabled."); + println!("{}", state.tr.launcher_mirror_china_enabled()); break; } "" => { @@ -1067,7 +1097,7 @@ fn show_mirror_submenu(state: &State) -> Result<()> { break; } _ => { - println!("Invalid input. Please try again."); + println!("{}", state.tr.launcher_invalid_input()); continue; } } diff --git a/rslib/i18n/build.rs b/rslib/i18n/build.rs index 4baa6a709..f604c9167 100644 --- a/rslib/i18n/build.rs +++ b/rslib/i18n/build.rs @@ -23,10 +23,10 @@ use write_strings::write_strings; fn main() -> Result<()> { // generate our own requirements - let map = get_ftl_data(); + let mut map = get_ftl_data(); check(&map); - let modules = get_modules(&map); - write_strings(&map, &modules); + let mut modules = get_modules(&map); + write_strings(&map, &modules, "strings.rs", "All"); typescript::write_ts_interface(&modules)?; python::write_py_interface(&modules)?; @@ -41,5 +41,12 @@ fn main() -> Result<()> { write_file_if_changed(path, meta_json)?; } } + + // generate strings for the launcher + map.iter_mut() + .for_each(|(_, modules)| modules.retain(|module, _| module == "launcher")); + modules.retain(|module| module.name == "launcher"); + write_strings(&map, &modules, "strings_launcher.rs", "Launcher"); + Ok(()) } diff --git a/rslib/i18n/src/generated.rs b/rslib/i18n/src/generated.rs index f3fa71ce8..7463a594e 100644 --- a/rslib/i18n/src/generated.rs +++ b/rslib/i18n/src/generated.rs @@ -1,8 +1,15 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -// Include auto-generated content - #![allow(clippy::all)] +#[derive(Clone)] +pub struct All; + +// Include auto-generated content include!(concat!(env!("OUT_DIR"), "/strings.rs")); + +impl Translations for All { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS; + const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE; +} diff --git a/rslib/i18n/src/generated_launcher.rs b/rslib/i18n/src/generated_launcher.rs new file mode 100644 index 000000000..35dc3f28b --- /dev/null +++ b/rslib/i18n/src/generated_launcher.rs @@ -0,0 +1,15 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; 
http://www.gnu.org/licenses/agpl.html + +#![allow(clippy::all)] + +#[derive(Clone)] +pub struct Launcher; + +// Include auto-generated content +include!(concat!(env!("OUT_DIR"), "/strings_launcher.rs")); + +impl Translations for Launcher { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS; + const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE; +} diff --git a/rslib/i18n/src/lib.rs b/rslib/i18n/src/lib.rs index bfd6f5ba2..95b960fad 100644 --- a/rslib/i18n/src/lib.rs +++ b/rslib/i18n/src/lib.rs @@ -2,8 +2,10 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html mod generated; +mod generated_launcher; use std::borrow::Cow; +use std::marker::PhantomData; use std::sync::Arc; use std::sync::Mutex; @@ -12,8 +14,6 @@ use fluent::FluentArgs; use fluent::FluentResource; use fluent::FluentValue; use fluent_bundle::bundle::FluentBundle as FluentBundleOrig; -use generated::KEYS_BY_MODULE; -use generated::STRINGS; use num_format::Locale; use serde::Serialize; use unic_langid::LanguageIdentifier; @@ -22,6 +22,9 @@ type FluentBundle = FluentBundleOrig { fn round(self) -> Self; } @@ -187,20 +190,67 @@ fn get_bundle_with_extra( get_bundle(text, extra_text, &locales) } +pub trait Translations { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>>; + const KEYS_BY_MODULE: &[&[&str]]; +} + #[derive(Clone)] -pub struct I18n { +pub struct I18n { inner: Arc>, + _translations_type: std::marker::PhantomData

<P>,
 }
 
-fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
- KEYS_BY_MODULE
- .get(module_idx)
- .and_then(|translations| translations.get(translation_idx))
- .cloned()
- .unwrap_or("invalid-module-or-translation-index")
-}
+impl<P: Translations> I18n<P>
{ + fn get_key(module_idx: usize, translation_idx: usize) -> &'static str { + P::KEYS_BY_MODULE + .get(module_idx) + .and_then(|translations| translations.get(translation_idx)) + .cloned() + .unwrap_or("invalid-module-or-translation-index") + } + + fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec { + langs + .iter() + .cloned() + .map(|lang| { + let mut buf = String::new(); + let lang_name = remapped_lang_name(&lang); + if let Some(strings) = P::STRINGS.get(lang_name) { + if desired_modules.is_empty() { + // empty list, provide all modules + for value in strings.values() { + buf.push_str(value) + } + } else { + for module_name in desired_modules { + if let Some(text) = strings.get(module_name.as_str()) { + buf.push_str(text); + } + } + } + } + buf + }) + .collect() + } + + /// This temporarily behaves like the older code; in the future we could + /// either access each &str separately, or load them on demand. + fn ftl_localized_text(lang: &LanguageIdentifier) -> Option { + let lang = remapped_lang_name(lang); + if let Some(module) = P::STRINGS.get(lang) { + let mut text = String::new(); + for module_text in module.values() { + text.push_str(module_text) + } + Some(text) + } else { + None + } + } -impl I18n { pub fn template_only() -> Self { Self::new::<&str>(&[]) } @@ -225,7 +275,7 @@ impl I18n { let mut output_langs = vec![]; for lang in input_langs { // if the language is bundled in the binary - if let Some(text) = ftl_localized_text(&lang).or_else(|| { + if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| { // when testing, allow missing translations if cfg!(test) { Some(String::new()) @@ -244,7 +294,7 @@ impl I18n { // add English templates let template_lang = "en-US".parse().unwrap(); - let template_text = ftl_localized_text(&template_lang).unwrap(); + let template_text = Self::ftl_localized_text(&template_lang).unwrap(); let template_bundle = get_bundle_with_extra(&template_text, None).unwrap(); bundles.push(template_bundle); output_langs.push(template_lang); @@ -261,6 +311,7 @@ impl I18n { bundles, langs: output_langs, })), + _translations_type: PhantomData, } } @@ -270,7 +321,7 @@ impl I18n { message_index: usize, args: FluentArgs, ) -> String { - let key = get_key(module_index, message_index); + let key = Self::get_key(module_index, message_index); self.translate(key, Some(args)).into() } @@ -305,7 +356,7 @@ impl I18n { /// implementation. pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript { let inner = self.inner.lock().unwrap(); - let resources = get_modules(&inner.langs, desired_modules); + let resources = Self::get_modules(&inner.langs, desired_modules); ResourcesForJavascript { langs: inner.langs.iter().map(ToString::to_string).collect(), resources, @@ -313,47 +364,6 @@ impl I18n { } } -fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec { - langs - .iter() - .cloned() - .map(|lang| { - let mut buf = String::new(); - let lang_name = remapped_lang_name(&lang); - if let Some(strings) = STRINGS.get(lang_name) { - if desired_modules.is_empty() { - // empty list, provide all modules - for value in strings.values() { - buf.push_str(value) - } - } else { - for module_name in desired_modules { - if let Some(text) = strings.get(module_name.as_str()) { - buf.push_str(text); - } - } - } - } - buf - }) - .collect() -} - -/// This temporarily behaves like the older code; in the future we could either -/// access each &str separately, or load them on demand. 
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option { - let lang = remapped_lang_name(lang); - if let Some(module) = STRINGS.get(lang) { - let mut text = String::new(); - for module_text in module.values() { - text.push_str(module_text) - } - Some(text) - } else { - None - } -} - struct I18nInner { // bundles in preferred language order, with template English as the // last element @@ -490,7 +500,7 @@ mod test { #[test] fn i18n() { // English template - let tr = I18n::new(&["zz"]); + let tr = I18n::::new(&["zz"]); assert_eq!(tr.translate("valid-key", None), "a valid key"); assert_eq!(tr.translate("invalid-key", None), "invalid-key"); @@ -513,7 +523,7 @@ mod test { ); // Another language - let tr = I18n::new(&["ja_JP"]); + let tr = I18n::::new(&["ja_JP"]); assert_eq!(tr.translate("valid-key", None), "キー"); assert_eq!(tr.translate("only-in-english", None), "not translated"); assert_eq!(tr.translate("invalid-key", None), "invalid-key"); @@ -524,7 +534,7 @@ mod test { ); // Decimal separator - let tr = I18n::new(&["pl-PL"]); + let tr = I18n::::new(&["pl-PL"]); // Polish will use a comma if the string is translated assert_eq!( tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])), diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs index 33905d98f..db31be2b7 100644 --- a/rslib/i18n/write_strings.rs +++ b/rslib/i18n/write_strings.rs @@ -15,7 +15,7 @@ use crate::extract::VariableKind; use crate::gather::TranslationsByFile; use crate::gather::TranslationsByLang; -pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) { +pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) { let mut buf = String::new(); // lang->module map @@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) { // ordered list of translations by module write_translation_key_index(modules, &mut buf); // methods to generate messages - write_methods(modules, &mut buf); + write_methods(modules, &mut buf, tag); let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap()); - let path = dir.join("strings.rs"); + let path = dir.join(out_fn); fs::write(path, buf).unwrap(); } -fn write_methods(modules: &[Module], buf: &mut String) { +fn write_methods(modules: &[Module], buf: &mut String, tag: &str) { buf.push_str( r#" -use crate::{I18n,Number}; +#[allow(unused_imports)] +use crate::{I18n,Number,Translations}; +#[allow(unused_imports)] use fluent::{FluentValue, FluentArgs}; use std::borrow::Cow; -impl I18n { "#, ); + writeln!(buf, "impl I18n<{tag}> {{").unwrap(); for module in modules { for translation in &module.translations { let func = translation.key.to_snake_case(); @@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) { writeln!( buf, - "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [", + "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [", count = modules.len(), ) .unwrap(); @@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) { fn write_lang_map(map: &TranslationsByLang, buf: &mut String) { buf.push_str( " -pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! { +pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! 
{ ", ); diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index c99fbe06e..3ecec1ac4 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -256,7 +256,7 @@ fn check_for_unstaged_changes() { fn generate_licences() -> Result { if which::which("cargo-license").is_err() { - Command::run("cargo install cargo-license@0.5.1")?; + Command::run("cargo install cargo-license@0.7.0")?; } let output = Command::run_with_output([ "cargo-license", From e4b8cf3a5f089698db4435b8e7e3a3d623892f27 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Sat, 27 Sep 2025 14:09:15 +0700 Subject: [PATCH 14/18] Update translations --- ftl/core-repo | 2 +- ftl/qt-repo | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ftl/core-repo b/ftl/core-repo index 480ef0da7..ec5e4cad6 160000 --- a/ftl/core-repo +++ b/ftl/core-repo @@ -1 +1 @@ -Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba +Subproject commit ec5e4cad6242e538cacf52265243668f0de5da80 diff --git a/ftl/qt-repo b/ftl/qt-repo index fd5f98478..0b7c53023 160000 --- a/ftl/qt-repo +++ b/ftl/qt-repo @@ -1 +1 @@ -Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6 +Subproject commit 0b7c530233390d73b706f012bbe7489539925c7d From f5f60548dbc83d9fb6c3560f0934bf6b8c53c279 Mon Sep 17 00:00:00 2001 From: Elias <94451739+eliasjlara@users.noreply.github.com> Date: Sat, 27 Sep 2025 09:21:57 +0200 Subject: [PATCH 15/18] Fix browser search newlines and update CONTRIBUTORS (#4336) --- CONTRIBUTORS | 1 + qt/aqt/browser/browser.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTORS b/CONTRIBUTORS index 36f535187..12eb79e51 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -244,6 +244,7 @@ memchr Max Romanowski Aldlss Hanna Nilsén +Elias Johansson Lara ******************** diff --git a/qt/aqt/browser/browser.py b/qt/aqt/browser/browser.py index e222f62c2..d935905f6 100644 --- a/qt/aqt/browser/browser.py +++ b/qt/aqt/browser/browser.py @@ -521,7 +521,7 @@ class Browser(QMainWindow): self.search() def current_search(self) -> str: - return self._line_edit().text() + return self._line_edit().text().replace("\n", " ") def search(self) -> None: """Search triggered programmatically. 
Caller must have saved note first.""" From 72d83ffc98d6475e37ca6760cb35192f6cf29ecb Mon Sep 17 00:00:00 2001 From: Toby Penner Date: Sun, 28 Sep 2025 11:56:07 -0400 Subject: [PATCH 16/18] Add syntax for multi-card cloze deletions (#4333) * Add multi-card cloze support * Add Toby Penner to CONTRIBUTORS --- CONTRIBUTORS | 1 + rslib/src/cloze.rs | 253 +++++++++++++++++++++++++++++++++++++++------ 2 files changed, 221 insertions(+), 33 deletions(-) diff --git a/CONTRIBUTORS b/CONTRIBUTORS index 12eb79e51..d8ac8caf5 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -245,6 +245,7 @@ Max Romanowski Aldlss Hanna Nilsén Elias Johansson Lara +Toby Penner ******************** diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index 027c14c0c..9df53286d 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -10,6 +10,7 @@ use std::sync::LazyLock; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape; use htmlescape::encode_attribute; +use itertools::Itertools; use nom::branch::alt; use nom::bytes::complete::tag; use nom::bytes::complete::take_while; @@ -26,7 +27,7 @@ use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; static CLOZE: LazyLock = - LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap()); + LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap()); static MATHJAX: LazyLock = LazyLock::new(|| { Regex::new( @@ -48,7 +49,7 @@ mod mathjax_caps { #[derive(Debug)] enum Token<'a> { // The parameter is the cloze number as is appears in the field content. - OpenCloze(u16), + OpenCloze(Vec), Text(&'a str), CloseCloze, } @@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator> { fn open_cloze(text: &str) -> IResult<&str, Token<'_>> { // opening brackets and 'c' let (text, _opening_brackets_and_c) = tag("{{c")(text)?; - // following number - let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?; - let digits: u16 = match digits.parse() { - Ok(digits) => digits, - Err(_) => { - // not a valid number; fail to recognize - return Err(nom::Err::Error(nom::error::make_error( - text, - nom::error::ErrorKind::Digit, - ))); - } - }; + // following comma-seperated numbers + let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?; + let ordinals: Vec = ordinals + .split(',') + .filter_map(|s| s.parse().ok()) + .collect::>() // deduplicate + .into_iter() + .sorted() // set conversion can de-order + .collect(); + if ordinals.is_empty() { + return Err(nom::Err::Error(nom::error::make_error( + text, + nom::error::ErrorKind::Digit, + ))); + } // :: let (text, _colons) = tag("::")(text)?; - Ok((text, Token::OpenCloze(digits))) + Ok((text, Token::OpenCloze(ordinals))) } fn close_cloze(text: &str) -> IResult<&str, Token<'_>> { @@ -121,11 +125,20 @@ enum TextOrCloze<'a> { #[derive(Debug)] struct ExtractedCloze<'a> { // `ordinal` is the cloze number as is appears in the field content. - ordinal: u16, + ordinals: Vec, nodes: Vec>, hint: Option<&'a str>, } +/// Generate a string representation of the ordinals for HTML +fn ordinals_str(ordinals: &[u16]) -> String { + ordinals + .iter() + .map(|o| o.to_string()) + .collect::>() + .join(",") +} + impl ExtractedCloze<'_> { /// Return the cloze's hint, or "..." if none was provided. 
fn hint(&self) -> &str { @@ -151,6 +164,11 @@ impl ExtractedCloze<'_> { buf.into() } + /// Checks if this cloze is active for a given ordinal + fn contains_ordinal(&self, ordinal: u16) -> bool { + self.ordinals.contains(&ordinal) + } + /// If cloze starts with image-occlusion:, return the text following that. fn image_occlusion(&self) -> Option<&str> { let TextOrCloze::Text(text) = self.nodes.first()? else { @@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec> { let mut output = vec![]; for token in tokenize(text) { match token { - Token::OpenCloze(ordinal) => { + Token::OpenCloze(ordinals) => { if open_clozes.len() < 10 { open_clozes.push(ExtractedCloze { - ordinal, + ordinals, nodes: Vec::with_capacity(1), // common case hint: None, }) @@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes( output: &mut Vec, ) { if let TextOrCloze::Cloze(cloze) = node { - if cloze.ordinal == cloze_ord { + if cloze.contains_ordinal(cloze_ord) { if question { output.push(cloze.hint().into()) } else { @@ -234,14 +252,16 @@ fn reveal_cloze( active_cloze_found_in_text: &mut bool, buf: &mut String, ) { - let active = cloze.ordinal == cloze_ord; + let active = cloze.contains_ordinal(cloze_ord); *active_cloze_found_in_text |= active; + if let Some(image_occlusion_text) = cloze.image_occlusion() { buf.push_str(&render_image_occlusion( image_occlusion_text, question, active, - cloze.ordinal, + cloze_ord, + &cloze.ordinals, )); return; } @@ -265,7 +285,7 @@ fn reveal_cloze( buf, r#"[{}]"#, encode_attribute(&content_buf), - cloze.ordinal, + ordinals_str(&cloze.ordinals), cloze.hint() ) .unwrap(); @@ -274,7 +294,7 @@ fn reveal_cloze( write!( buf, r#""#, - cloze.ordinal + ordinals_str(&cloze.ordinals) ) .unwrap(); for node in &cloze.nodes { @@ -292,7 +312,7 @@ fn reveal_cloze( write!( buf, r#""#, - cloze.ordinal + ordinals_str(&cloze.ordinals) ) .unwrap(); for node in &cloze.nodes { @@ -308,23 +328,29 @@ fn reveal_cloze( } } -fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String { +fn render_image_occlusion( + text: &str, + question_side: bool, + active: bool, + ordinal: u16, + ordinals: &[u16], +) -> String { if (question_side && active) || ordinal == 0 { format!( r#"
<div class="cloze" data-ordinal="{}" {}></div>
"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else if !active { format!( r#"<div class="cloze-inactive" data-ordinal="{}" {}></div>
"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else if !question_side && active { format!( r#"
"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else { @@ -338,7 +364,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec { if let TextOrCloze::Cloze(cloze) = node { if cloze.image_occlusion().is_some() { if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) { - occlusions.entry(cloze.ordinal).or_default().push(shape); + // Associate this occlusion with all ordinals in this cloze + for &ordinal in &cloze.ordinals { + occlusions.entry(ordinal).or_default().push(shape.clone()); + } } } } @@ -420,7 +449,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String { pub(crate) fn contains_cloze(text: &str) -> bool { parse_text_with_clozes(text) .iter() - .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0)) + .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0))) } /// Returns the set of cloze number as they appear in the fields's content. @@ -433,10 +462,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet { fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet) { for node in nodes { if let TextOrCloze::Cloze(cloze) = node { - if cloze.ordinal != 0 { - set.insert(cloze.ordinal); - add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set); + for &ordinal in &cloze.ordinals { + if ordinal != 0 { + set.insert(ordinal); + } } + add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set); } } } @@ -654,4 +685,160 @@ mod test { ) ); } + + #[test] + fn multi_card_card_generation() { + let text = "{{c1,2,3::multi}}"; + assert_eq!( + cloze_number_in_fields(vec![text]), + vec![1, 2, 3].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_basic() { + let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...] word and [...] vs second" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[...] 
word and first vs [...]" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, false)).as_ref(), + "shared word and first vs second" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, false)).as_ref(), + "shared word and first vs second" + ); + assert_eq!( + cloze_numbers_in_string(text), + vec![1, 2].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_html_attributes() { + let text = "{{c1,2,3::multi}}"; + + let card1_html = reveal_cloze_text(text, 1, true); + assert!(card1_html.contains(r#"data-ordinal="1,2,3""#)); + + let card2_html = reveal_cloze_text(text, 2, true); + assert!(card2_html.contains(r#"data-ordinal="1,2,3""#)); + + let card3_html = reveal_cloze_text(text, 3, true); + assert!(card3_html.contains(r#"data-ordinal="1,2,3""#)); + } + + #[test] + fn multi_card_cloze_with_hints() { + let text = "{{c1,2::answer::hint}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[hint]" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[hint]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, false)).as_ref(), + "answer" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, false)).as_ref(), + "answer" + ); + } + + #[test] + fn multi_card_cloze_edge_cases() { + assert_eq!( + cloze_numbers_in_string("{{c1,1,2::test}}"), + vec![1, 2].into_iter().collect::>() + ); + + assert_eq!( + cloze_numbers_in_string("{{c0,1,2::test}}"), + vec![1, 2].into_iter().collect::>() + ); + + assert_eq!( + cloze_numbers_in_string("{{c1,,3::test}}"), + vec![1, 3].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_only_filter() { + let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}"; + + assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ..."); + assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ..."); + assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first"); + assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second"); + } + + #[test] + fn multi_card_nested_cloze() { + let text = "{{c1,2::outer {{c3::inner}}}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[...]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 3, true)).as_ref(), + "outer [...]" + ); + + assert_eq!( + cloze_numbers_in_string(text), + vec![1, 2, 3].into_iter().collect::>() + ); + } + + #[test] + fn nested_parent_child_card_same_cloze() { + let text = "{{c1::outer {{c1::inner}}}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...]" + ); + + assert_eq!( + cloze_numbers_in_string(text), + vec![1].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_image_occlusion() { + let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}"; + + let occlusions = parse_image_occlusions(text); + assert_eq!(occlusions.len(), 2); + assert!(occlusions.iter().any(|o| o.ordinal == 1)); + assert!(occlusions.iter().any(|o| o.ordinal == 2)); + + let card1_html = reveal_cloze_text(text, 1, true); + assert!(card1_html.contains(r#"data-ordinal="1,2""#)); + + let card2_html = reveal_cloze_text(text, 2, true); + assert!(card2_html.contains(r#"data-ordinal="1,2""#)); + } } From f28018c5c68f14d9295426d8aae02831d8e0cf84 Mon Sep 17 00:00:00 2001 From: llama Date: Mon, 29 Sep 2025 22:12:24 +0800 Subject: [PATCH 17/18] fix(ci): pin cargo-license to 0.7.0 (#4367) * pin cargo-license to 0.7.0 * ./ninja fix:minilints --- 
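Side note on the minilints change below: dropping the `which` guard is safe because `cargo install` with an exact `@version` exits successfully without rebuilding when that exact version is already present, and replaces any other installed version otherwise, so CI and local runs stay on the same cargo-license output format. A minimal sketch of that idea using plain std::process::Command (the real code calls the project's own `Command::run` helper; the function name here is illustrative only):

    use std::process::Command;

    // Always request the pinned version; this is cheap when 0.7.0 is already
    // installed, and upgrades/downgrades any other version to 0.7.0.
    fn ensure_pinned_cargo_license() -> std::io::Result<()> {
        let status = Command::new("cargo")
            .args(["install", "cargo-license@0.7.0"])
            .status()?;
        assert!(status.success(), "cargo install cargo-license failed");
        Ok(())
    }
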
cargo/licenses.json | 4 ++-- tools/minilints/src/main.rs | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/cargo/licenses.json b/cargo/licenses.json index 274c060be..53b832fda 100644 --- a/cargo/licenses.json +++ b/cargo/licenses.json @@ -2226,7 +2226,7 @@ { "authors": "Ibraheem Ahmed ", "description": "A high performance, zero-copy URL router.", - "license": "MIT AND BSD-3-Clause", + "license": "BSD-3-Clause AND MIT", "license_file": null, "name": "matchit", "repository": "https://github.com/ibraheemdev/matchit" @@ -4154,7 +4154,7 @@ { "authors": "David Tolnay ", "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31", - "license": "(MIT OR Apache-2.0) AND Unicode-3.0", + "license": "(Apache-2.0 OR MIT) AND Unicode-3.0", "license_file": null, "name": "unicode-ident", "repository": "https://github.com/dtolnay/unicode-ident" diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index 3ecec1ac4..6d38278b5 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -255,9 +255,7 @@ fn check_for_unstaged_changes() { } fn generate_licences() -> Result { - if which::which("cargo-license").is_err() { - Command::run("cargo install cargo-license@0.7.0")?; - } + Command::run("cargo install cargo-license@0.7.0")?; let output = Command::run_with_output([ "cargo-license", "--features", From a842ba14981ed11664664808887617c42bb186e2 Mon Sep 17 00:00:00 2001 From: llama Date: Mon, 29 Sep 2025 22:13:09 +0800 Subject: [PATCH 18/18] replace more hardcoded strings in the launcher with translations (#4368) --- ftl/core/launcher.ftl | 4 ++++ qt/launcher/src/main.rs | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/ftl/core/launcher.ftl b/ftl/core/launcher.ftl index d07608fbd..13d419a0c 100644 --- a/ftl/core/launcher.ftl +++ b/ftl/core/launcher.ftl @@ -31,3 +31,7 @@ launcher-mirror-no-mirror = No mirror launcher-mirror-china = China launcher-mirror-disabled = Mirror disabled. launcher-mirror-china-enabled = China mirror enabled. +launcher-beta-releases-enabled = Beta releases enabled. +launcher-beta-releases-disabled = Beta releases disabled. +launcher-download-caching-enabled = Download caching enabled. +launcher-download-caching-disabled = Download caching disabled and cache cleared. 
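The new launcher.ftl keys above are reached from Rust through methods generated on the translation struct; the generated name is simply the Fluent key with dashes swapped for underscores, which is why the main.rs hunk below can call state.tr.launcher_beta_releases_enabled() and friends. A small sketch of the naming rule (illustrative only, not the actual codegen):

    // The real accessors are generated at build time; this only mirrors the
    // key-to-method naming convention seen in the hunks above and below.
    fn ftl_key_to_method(key: &str) -> String {
        key.replace('-', "_")
    }

    fn main() {
        assert_eq!(
            ftl_key_to_method("launcher-download-caching-disabled"),
            "launcher_download_caching_disabled"
        );
    }
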
diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs index e53f34ca3..cdfc54d8c 100644 --- a/qt/launcher/src/main.rs +++ b/qt/launcher/src/main.rs @@ -390,10 +390,10 @@ fn main_menu_loop(state: &State) -> Result<()> { // Toggle beta prerelease file if state.prerelease_marker.exists() { let _ = remove_file(&state.prerelease_marker); - println!("Beta releases disabled."); + println!("{}", state.tr.launcher_beta_releases_disabled()); } else { write_file(&state.prerelease_marker, "")?; - println!("Beta releases enabled."); + println!("{}", state.tr.launcher_beta_releases_enabled()); } println!(); continue; @@ -402,14 +402,14 @@ fn main_menu_loop(state: &State) -> Result<()> { // Toggle cache disable file if state.no_cache_marker.exists() { let _ = remove_file(&state.no_cache_marker); - println!("Download caching enabled."); + println!("{}", state.tr.launcher_download_caching_enabled()); } else { write_file(&state.no_cache_marker, "")?; // Delete the cache directory and everything in it if state.uv_cache_dir.exists() { let _ = anki_io::remove_dir_all(&state.uv_cache_dir); } - println!("Download caching disabled and cache cleared."); + println!("{}", state.tr.launcher_download_caching_disabled()); } println!(); continue;