>,
+ _translations_type: std::marker::PhantomData,
}
-fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
- KEYS_BY_MODULE
- .get(module_idx)
- .and_then(|translations| translations.get(translation_idx))
- .cloned()
- .unwrap_or("invalid-module-or-translation-index")
-}
+impl I18n {
+ fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
+ P::KEYS_BY_MODULE
+ .get(module_idx)
+ .and_then(|translations| translations.get(translation_idx))
+ .cloned()
+ .unwrap_or("invalid-module-or-translation-index")
+ }
+
+ fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec {
+ langs
+ .iter()
+ .cloned()
+ .map(|lang| {
+ let mut buf = String::new();
+ let lang_name = remapped_lang_name(&lang);
+ if let Some(strings) = P::STRINGS.get(lang_name) {
+ if desired_modules.is_empty() {
+ // empty list, provide all modules
+ for value in strings.values() {
+ buf.push_str(value)
+ }
+ } else {
+ for module_name in desired_modules {
+ if let Some(text) = strings.get(module_name.as_str()) {
+ buf.push_str(text);
+ }
+ }
+ }
+ }
+ buf
+ })
+ .collect()
+ }
+
+ /// This temporarily behaves like the older code; in the future we could
+ /// either access each &str separately, or load them on demand.
+ fn ftl_localized_text(lang: &LanguageIdentifier) -> Option {
+ let lang = remapped_lang_name(lang);
+ if let Some(module) = P::STRINGS.get(lang) {
+ let mut text = String::new();
+ for module_text in module.values() {
+ text.push_str(module_text)
+ }
+ Some(text)
+ } else {
+ None
+ }
+ }
-impl I18n {
pub fn template_only() -> Self {
Self::new::<&str>(&[])
}
@@ -225,7 +275,7 @@ impl I18n {
let mut output_langs = vec![];
for lang in input_langs {
// if the language is bundled in the binary
- if let Some(text) = ftl_localized_text(&lang).or_else(|| {
+ if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
// when testing, allow missing translations
if cfg!(test) {
Some(String::new())
@@ -244,7 +294,7 @@ impl I18n {
// add English templates
let template_lang = "en-US".parse().unwrap();
- let template_text = ftl_localized_text(&template_lang).unwrap();
+ let template_text = Self::ftl_localized_text(&template_lang).unwrap();
let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
bundles.push(template_bundle);
output_langs.push(template_lang);
@@ -261,6 +311,7 @@ impl I18n {
bundles,
langs: output_langs,
})),
+ _translations_type: PhantomData,
}
}
@@ -270,7 +321,7 @@ impl I18n {
message_index: usize,
args: FluentArgs,
) -> String {
- let key = get_key(module_index, message_index);
+ let key = Self::get_key(module_index, message_index);
self.translate(key, Some(args)).into()
}
@@ -305,7 +356,7 @@ impl I18n {
/// implementation.
pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
let inner = self.inner.lock().unwrap();
- let resources = get_modules(&inner.langs, desired_modules);
+ let resources = Self::get_modules(&inner.langs, desired_modules);
ResourcesForJavascript {
langs: inner.langs.iter().map(ToString::to_string).collect(),
resources,
@@ -313,47 +364,6 @@ impl I18n {
}
}
-fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec {
- langs
- .iter()
- .cloned()
- .map(|lang| {
- let mut buf = String::new();
- let lang_name = remapped_lang_name(&lang);
- if let Some(strings) = STRINGS.get(lang_name) {
- if desired_modules.is_empty() {
- // empty list, provide all modules
- for value in strings.values() {
- buf.push_str(value)
- }
- } else {
- for module_name in desired_modules {
- if let Some(text) = strings.get(module_name.as_str()) {
- buf.push_str(text);
- }
- }
- }
- }
- buf
- })
- .collect()
-}
-
-/// This temporarily behaves like the older code; in the future we could either
-/// access each &str separately, or load them on demand.
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option {
- let lang = remapped_lang_name(lang);
- if let Some(module) = STRINGS.get(lang) {
- let mut text = String::new();
- for module_text in module.values() {
- text.push_str(module_text)
- }
- Some(text)
- } else {
- None
- }
-}
-
struct I18nInner {
// bundles in preferred language order, with template English as the
// last element
@@ -490,7 +500,7 @@ mod test {
#[test]
fn i18n() {
// English template
- let tr = I18n::new(&["zz"]);
+ let tr = I18n::::new(&["zz"]);
assert_eq!(tr.translate("valid-key", None), "a valid key");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -513,7 +523,7 @@ mod test {
);
// Another language
- let tr = I18n::new(&["ja_JP"]);
+ let tr = I18n::::new(&["ja_JP"]);
assert_eq!(tr.translate("valid-key", None), "キー");
assert_eq!(tr.translate("only-in-english", None), "not translated");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -524,7 +534,7 @@ mod test {
);
// Decimal separator
- let tr = I18n::new(&["pl-PL"]);
+ let tr = I18n::::new(&["pl-PL"]);
// Polish will use a comma if the string is translated
assert_eq!(
tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),
diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs
index 33905d98f..db31be2b7 100644
--- a/rslib/i18n/write_strings.rs
+++ b/rslib/i18n/write_strings.rs
@@ -15,7 +15,7 @@ use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;
-pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
+pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
let mut buf = String::new();
// lang->module map
@@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
// ordered list of translations by module
write_translation_key_index(modules, &mut buf);
// methods to generate messages
- write_methods(modules, &mut buf);
+ write_methods(modules, &mut buf, tag);
let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
- let path = dir.join("strings.rs");
+ let path = dir.join(out_fn);
fs::write(path, buf).unwrap();
}
-fn write_methods(modules: &[Module], buf: &mut String) {
+fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
buf.push_str(
r#"
-use crate::{I18n,Number};
+#[allow(unused_imports)]
+use crate::{I18n,Number,Translations};
+#[allow(unused_imports)]
use fluent::{FluentValue, FluentArgs};
use std::borrow::Cow;
-impl I18n {
"#,
);
+ writeln!(buf, "impl I18n<{tag}> {{").unwrap();
for module in modules {
for translation in &module.translations {
let func = translation.key.to_snake_case();
@@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
writeln!(
buf,
- "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
+ "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
count = modules.len(),
)
.unwrap();
@@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
buf.push_str(
"
-pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
+pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
);
diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs
index 027c14c0c..9df53286d 100644
--- a/rslib/src/cloze.rs
+++ b/rslib/src/cloze.rs
@@ -10,6 +10,7 @@ use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
+use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
@@ -26,7 +27,7 @@ use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;
static CLOZE: LazyLock =
- LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
+ LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());
static MATHJAX: LazyLock = LazyLock::new(|| {
Regex::new(
@@ -48,7 +49,7 @@ mod mathjax_caps {
#[derive(Debug)]
enum Token<'a> {
 // The parameter is the cloze number as it appears in the field content.
- OpenCloze(u16),
+ OpenCloze(Vec),
Text(&'a str),
CloseCloze,
}
@@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator- > {
fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
// opening brackets and 'c'
let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
- // following number
- let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
- let digits: u16 = match digits.parse() {
- Ok(digits) => digits,
- Err(_) => {
- // not a valid number; fail to recognize
- return Err(nom::Err::Error(nom::error::make_error(
- text,
- nom::error::ErrorKind::Digit,
- )));
- }
- };
+ // following comma-separated numbers
+ let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
+ let ordinals: Vec = ordinals
+ .split(',')
+ .filter_map(|s| s.parse().ok())
+ .collect::>() // deduplicate
+ .into_iter()
+ .sorted() // set conversion can de-order
+ .collect();
+ if ordinals.is_empty() {
+ return Err(nom::Err::Error(nom::error::make_error(
+ text,
+ nom::error::ErrorKind::Digit,
+ )));
+ }
// ::
let (text, _colons) = tag("::")(text)?;
- Ok((text, Token::OpenCloze(digits)))
+ Ok((text, Token::OpenCloze(ordinals)))
}
fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
@@ -121,11 +125,20 @@ enum TextOrCloze<'a> {
#[derive(Debug)]
struct ExtractedCloze<'a> {
 // `ordinal` is the cloze number as it appears in the field content.
- ordinal: u16,
+ ordinals: Vec,
nodes: Vec>,
hint: Option<&'a str>,
}
+/// Generate a string representation of the ordinals for HTML
+fn ordinals_str(ordinals: &[u16]) -> String {
+ ordinals
+ .iter()
+ .map(|o| o.to_string())
+ .collect::>()
+ .join(",")
+}
+
impl ExtractedCloze<'_> {
/// Return the cloze's hint, or "..." if none was provided.
fn hint(&self) -> &str {
@@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
buf.into()
}
+ /// Checks if this cloze is active for a given ordinal
+ fn contains_ordinal(&self, ordinal: u16) -> bool {
+ self.ordinals.contains(&ordinal)
+ }
+
/// If cloze starts with image-occlusion:, return the text following that.
fn image_occlusion(&self) -> Option<&str> {
let TextOrCloze::Text(text) = self.nodes.first()? else {
@@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec> {
let mut output = vec![];
for token in tokenize(text) {
match token {
- Token::OpenCloze(ordinal) => {
+ Token::OpenCloze(ordinals) => {
if open_clozes.len() < 10 {
open_clozes.push(ExtractedCloze {
- ordinal,
+ ordinals,
nodes: Vec::with_capacity(1), // common case
hint: None,
})
@@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
output: &mut Vec,
) {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal == cloze_ord {
+ if cloze.contains_ordinal(cloze_ord) {
if question {
output.push(cloze.hint().into())
} else {
@@ -234,14 +252,16 @@ fn reveal_cloze(
active_cloze_found_in_text: &mut bool,
buf: &mut String,
) {
- let active = cloze.ordinal == cloze_ord;
+ let active = cloze.contains_ordinal(cloze_ord);
*active_cloze_found_in_text |= active;
+
if let Some(image_occlusion_text) = cloze.image_occlusion() {
buf.push_str(&render_image_occlusion(
image_occlusion_text,
question,
active,
- cloze.ordinal,
+ cloze_ord,
+ &cloze.ordinals,
));
return;
}
@@ -265,7 +285,7 @@ fn reveal_cloze(
buf,
r#"[{}]"#,
encode_attribute(&content_buf),
- cloze.ordinal,
+ ordinals_str(&cloze.ordinals),
cloze.hint()
)
.unwrap();
@@ -274,7 +294,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -292,7 +312,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -308,23 +328,29 @@ fn reveal_cloze(
}
}
-fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
+fn render_image_occlusion(
+ text: &str,
+ question_side: bool,
+ active: bool,
+ ordinal: u16,
+ ordinals: &[u16],
+) -> String {
if (question_side && active) || ordinal == 0 {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !question_side && active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else {
@@ -338,7 +364,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.image_occlusion().is_some() {
if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
- occlusions.entry(cloze.ordinal).or_default().push(shape);
+ // Associate this occlusion with all ordinals in this cloze
+ for &ordinal in &cloze.ordinals {
+ occlusions.entry(ordinal).or_default().push(shape.clone());
+ }
}
}
}
@@ -420,7 +449,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
pub(crate) fn contains_cloze(text: &str) -> bool {
parse_text_with_clozes(text)
.iter()
- .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
+ .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
}
/// Returns the set of cloze number as they appear in the fields's content.
@@ -433,10 +462,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet {
fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet) {
for node in nodes {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal != 0 {
- set.insert(cloze.ordinal);
- add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
+ for &ordinal in &cloze.ordinals {
+ if ordinal != 0 {
+ set.insert(ordinal);
+ }
}
+ add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
}
}
}
@@ -654,4 +685,160 @@ mod test {
)
);
}
+
+ #[test]
+ fn multi_card_card_generation() {
+ let text = "{{c1,2,3::multi}}";
+ assert_eq!(
+ cloze_number_in_fields(vec![text]),
+ vec![1, 2, 3].into_iter().collect::>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_basic() {
+ let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...] word and [...] vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...] word and first vs [...]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2].into_iter().collect::>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_html_attributes() {
+ let text = "{{c1,2,3::multi}}";
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card3_html = reveal_cloze_text(text, 3, true);
+ assert!(card3_html.contains(r#"data-ordinal="1,2,3""#));
+ }
+
+ #[test]
+ fn multi_card_cloze_with_hints() {
+ let text = "{{c1,2::answer::hint}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[hint]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[hint]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "answer"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "answer"
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_edge_cases() {
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,1,2::test}}"),
+ vec![1, 2].into_iter().collect::>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c0,1,2::test}}"),
+ vec![1, 2].into_iter().collect::>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,,3::test}}"),
+ vec![1, 3].into_iter().collect::>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_only_filter() {
+ let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first");
+ assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second");
+ }
+
+ #[test]
+ fn multi_card_nested_cloze() {
+ let text = "{{c1,2::outer {{c3::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 3, true)).as_ref(),
+ "outer [...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2, 3].into_iter().collect::>()
+ );
+ }
+
+ #[test]
+ fn nested_parent_child_card_same_cloze() {
+ let text = "{{c1::outer {{c1::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1].into_iter().collect::>()
+ );
+ }
+
+ #[test]
+ fn multi_card_image_occlusion() {
+ let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
+
+ let occlusions = parse_image_occlusions(text);
+ assert_eq!(occlusions.len(), 2);
+ assert!(occlusions.iter().any(|o| o.ordinal == 1));
+ assert!(occlusions.iter().any(|o| o.ordinal == 2));
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2""#));
+ }
}
diff --git a/rslib/src/collection/service.rs b/rslib/src/collection/service.rs
index 2050a6897..a37360782 100644
--- a/rslib/src/collection/service.rs
+++ b/rslib/src/collection/service.rs
@@ -1,8 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use anki_proto::collection::GetCustomColoursResponse;
use anki_proto::generic;
use crate::collection::Collection;
+use crate::config::ConfigKey;
use crate::error;
use crate::prelude::BoolKey;
use crate::prelude::Op;
@@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
})
.map(Into::into)
}
+
+ fn get_custom_colours(
+ &mut self,
+ ) -> error::Result {
+ let colours = self
+ .get_config_optional(ConfigKey::CustomColorPickerPalette)
+ .unwrap_or_default();
+ Ok(GetCustomColoursResponse { colours })
+ }
}
diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs
index 5ece5b7e1..1e507281a 100644
--- a/rslib/src/config/mod.rs
+++ b/rslib/src/config/mod.rs
@@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
NextNewCardPosition,
#[strum(to_string = "schedVer")]
SchedulerVersion,
+ CustomColorPickerPalette,
}
#[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]
diff --git a/rslib/src/revlog/mod.rs b/rslib/src/revlog/mod.rs
index f52698388..fbb9b459a 100644
--- a/rslib/src/revlog/mod.rs
+++ b/rslib/src/revlog/mod.rs
@@ -85,6 +85,15 @@ impl RevlogEntry {
.unwrap()
}
+ pub(crate) fn last_interval_secs(&self) -> u32 {
+ u32::try_from(if self.last_interval > 0 {
+ self.last_interval.saturating_mul(86_400)
+ } else {
+ self.last_interval.saturating_mul(-1)
+ })
+ .unwrap()
+ }
+
/// Returns true if this entry represents a reset operation.
/// These entries are created when a card is reset using
/// [`Collection::reschedule_cards_as_new`].
diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs
index 420ead5a3..303bbfd91 100644
--- a/rslib/src/scheduler/fsrs/memory_state.rs
+++ b/rslib/src/scheduler/fsrs/memory_state.rs
@@ -136,6 +136,19 @@ impl Collection {
let deckconfig_id = deck.config_id().unwrap();
// reschedule it
let original_interval = card.interval;
+ let min_interval = |interval: u32| {
+ let previous_interval =
+ last_info.previous_interval.unwrap_or(0);
+ if interval > previous_interval {
+ // interval grew; don't allow fuzzed interval to
+ // be less than previous+1
+ previous_interval + 1
+ } else {
+ // interval shrunk; don't restrict negative fuzz
+ 0
+ }
+ .max(1)
+ };
let interval = fsrs.next_interval(
Some(state.stability),
desired_retention,
@@ -146,7 +159,7 @@ impl Collection {
.and_then(|r| {
r.find_interval(
interval,
- 1,
+ min_interval(interval as u32),
req.max_interval,
days_elapsed as u32,
deckconfig_id,
@@ -157,7 +170,7 @@ impl Collection {
with_review_fuzz(
card.get_fuzz_factor(true),
interval,
- 1,
+ min_interval(interval as u32),
req.max_interval,
)
});
@@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo {
/// reviewed the card and now, so that we can determine an accurate period
/// when the card has subsequently been rescheduled to a different day.
pub(crate) last_reviewed_at: Option,
+ /// The interval before the latest review. Used to prevent fuzz from going
+ /// backwards when rescheduling the card
+ pub(crate) previous_interval: Option,
}
/// Return a map of cards to info about last review.
@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap= 0 && e.button_chosen > 1 {
+ Some(e.last_interval as u32)
+ } else {
+ None
+ };
} else if e.is_reset() {
last_reviewed_at = None;
+ previous_interval = None;
}
}
- out.insert(card_id, LastRevlogInfo { last_reviewed_at });
+ out.insert(
+ card_id,
+ LastRevlogInfo {
+ last_reviewed_at,
+ previous_interval,
+ },
+ );
});
out
}
diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs
index 726870fe1..1fb1d58b8 100644
--- a/rslib/src/scheduler/fsrs/params.rs
+++ b/rslib/src/scheduler/fsrs/params.rs
@@ -174,7 +174,7 @@ impl Collection {
}
}
- let health_check_passed = if health_check {
+ let health_check_passed = if health_check && input.train_set.len() > 300 {
let fsrs = FSRS::new(None)?;
fsrs.evaluate_with_time_series_splits(input, |_| true)
.ok()
@@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs(
}))
.collect_vec();
- let skip = if training { 1 } else { 0 };
- // Convert the remaining entries into separate FSRSItems, where each item
- // contains all reviews done until then.
- let items: Vec<(RevlogId, FSRSItem)> = entries
- .iter()
- .enumerate()
- .skip(skip)
- .map(|(outer_idx, entry)| {
- let reviews = entries
- .iter()
- .take(outer_idx + 1)
- .enumerate()
- .map(|(inner_idx, r)| FSRSReview {
- rating: r.button_chosen as u32,
- delta_t: delta_ts[inner_idx],
- })
- .collect();
- (entry.id, FSRSItem { reviews })
- })
- .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0)
- .collect_vec();
+ let items = if training {
+ // Convert the remaining entries into separate FSRSItems, where each item
+ // contains all reviews done until then.
+ let mut items = Vec::with_capacity(entries.len());
+ let mut current_reviews = Vec::with_capacity(entries.len());
+ for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() {
+ current_reviews.push(FSRSReview {
+ rating: entry.button_chosen as u32,
+ delta_t,
+ });
+ if idx >= 1 && delta_t > 0 {
+ items.push((
+ entry.id,
+ FSRSItem {
+ reviews: current_reviews.clone(),
+ },
+ ));
+ }
+ }
+ items
+ } else {
+ // When not training, we only need the final FSRS item, which represents
+ // the complete history of the card. This avoids expensive clones in a loop.
+ let reviews = entries
+ .iter()
+ .zip(delta_ts.iter())
+ .map(|(entry, &delta_t)| FSRSReview {
+ rating: entry.button_chosen as u32,
+ delta_t,
+ })
+ .collect();
+ let last_entry = entries.last().unwrap();
+
+ vec![(last_entry.id, FSRSItem { reviews })]
+ };
+
if items.is_empty() {
None
} else {
@@ -738,7 +753,7 @@ pub(crate) mod tests {
],
false,
),
- fsrs_items!([review(0)], [review(0), review(1)])
+ fsrs_items!([review(0), review(1)])
);
}
@@ -809,7 +824,7 @@ pub(crate) mod tests {
// R | A X R
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9)),
- fsrs_items!([review(0)], [review(0), review(2)])
+ fsrs_items!([review(0), review(2)])
);
}
@@ -828,6 +843,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
+ .last()
+ .unwrap()
+ .reviews
.len(),
2
);
@@ -849,6 +867,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
+ .last()
+ .unwrap()
+ .reviews
.len(),
2
);
diff --git a/rslib/src/scheduler/fsrs/rescheduler.rs b/rslib/src/scheduler/fsrs/rescheduler.rs
index db490b3e4..37c824230 100644
--- a/rslib/src/scheduler/fsrs/rescheduler.rs
+++ b/rslib/src/scheduler/fsrs/rescheduler.rs
@@ -115,13 +115,14 @@ impl Rescheduler {
pub fn find_interval(
&self,
interval: f32,
- minimum: u32,
- maximum: u32,
+ minimum_interval: u32,
+ maximum_interval: u32,
days_elapsed: u32,
deckconfig_id: DeckConfigId,
fuzz_seed: Option,
) -> Option {
- let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum);
+ let (before_days, after_days) =
+ constrained_fuzz_bounds(interval, minimum_interval, maximum_interval);
// Don't reschedule the card when it's overdue
if after_days < days_elapsed {
diff --git a/rslib/src/search/builder.rs b/rslib/src/search/builder.rs
index a76af0560..0c22ff1eb 100644
--- a/rslib/src/search/builder.rs
+++ b/rslib/src/search/builder.rs
@@ -6,6 +6,7 @@ use std::mem;
use itertools::Itertools;
use super::writer::write_nodes;
+use super::FieldSearchMode;
use super::Node;
use super::SearchNode;
use super::StateKind;
@@ -174,7 +175,7 @@ impl SearchNode {
pub fn from_tag_name(name: &str) -> Self {
Self::Tag {
tag: escape_anki_wildcards_for_search_node(name),
- is_re: false,
+ mode: FieldSearchMode::Normal,
}
}
diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs
index 0960fabf9..0dd52dbc3 100644
--- a/rslib/src/search/mod.rs
+++ b/rslib/src/search/mod.rs
@@ -13,6 +13,7 @@ pub use builder::JoinSearches;
pub use builder::Negated;
pub use builder::SearchBuilder;
pub use parser::parse as parse_search;
+pub use parser::FieldSearchMode;
pub use parser::Node;
pub use parser::PropertyKind;
pub use parser::RatingKind;
diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs
index 33c1a4622..5928bf486 100644
--- a/rslib/src/search/parser.rs
+++ b/rslib/src/search/parser.rs
@@ -3,6 +3,7 @@
use std::sync::LazyLock;
+use anki_proto::search::search_node::FieldSearchMode as FieldSearchModeProto;
use nom::branch::alt;
use nom::bytes::complete::escaped;
use nom::bytes::complete::is_not;
@@ -27,7 +28,6 @@ use crate::error::ParseError;
use crate::error::Result;
use crate::error::SearchErrorKind as FailKind;
use crate::prelude::*;
-
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err>>;
type ParseResult<'a, O> = std::result::Result>>;
@@ -48,6 +48,23 @@ pub enum Node {
Search(SearchNode),
}
+#[derive(Copy, Debug, PartialEq, Eq, Clone)]
+pub enum FieldSearchMode {
+ Normal,
+ Regex,
+ NoCombining,
+}
+
+impl From for FieldSearchMode {
+ fn from(mode: FieldSearchModeProto) -> Self {
+ match mode {
+ FieldSearchModeProto::Normal => Self::Normal,
+ FieldSearchModeProto::Regex => Self::Regex,
+ FieldSearchModeProto::Nocombining => Self::NoCombining,
+ }
+ }
+}
+
#[derive(Debug, PartialEq, Clone)]
pub enum SearchNode {
// text without a colon
@@ -56,7 +73,7 @@ pub enum SearchNode {
SingleField {
field: String,
text: String,
- is_re: bool,
+ mode: FieldSearchMode,
},
AddedInDays(u32),
EditedInDays(u32),
@@ -77,7 +94,7 @@ pub enum SearchNode {
},
Tag {
tag: String,
- is_re: bool,
+ mode: FieldSearchMode,
},
Duplicates {
notetype_id: NotetypeId,
@@ -373,12 +390,17 @@ fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
Ok(if let Some(re) = s.strip_prefix("re:") {
SearchNode::Tag {
tag: unescape_quotes(re),
- is_re: true,
+ mode: FieldSearchMode::Regex,
+ }
+ } else if let Some(nc) = s.strip_prefix("nc:") {
+ SearchNode::Tag {
+ tag: unescape(nc)?,
+ mode: FieldSearchMode::NoCombining,
}
} else {
SearchNode::Tag {
tag: unescape(s)?,
- is_re: false,
+ mode: FieldSearchMode::Normal,
}
})
}
@@ -670,13 +692,19 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
SearchNode::SingleField {
field: unescape(key)?,
text: unescape_quotes(stripped),
- is_re: true,
+ mode: FieldSearchMode::Regex,
+ }
+ } else if let Some(stripped) = val.strip_prefix("nc:") {
+ SearchNode::SingleField {
+ field: unescape(key)?,
+ text: unescape_quotes(stripped),
+ mode: FieldSearchMode::NoCombining,
}
} else {
SearchNode::SingleField {
field: unescape(key)?,
text: unescape(val)?,
- is_re: false,
+ mode: FieldSearchMode::Normal,
}
})
}
@@ -806,7 +834,7 @@ mod test {
Search(SingleField {
field: "foo".into(),
text: "bar baz".into(),
- is_re: false,
+ mode: FieldSearchMode::Normal,
})
]))),
Or,
@@ -819,7 +847,16 @@ mod test {
vec![Search(SingleField {
field: "foo".into(),
text: "bar".into(),
- is_re: true
+ mode: FieldSearchMode::Regex,
+ })]
+ );
+
+ assert_eq!(
+ parse("foo:nc:bar")?,
+ vec![Search(SingleField {
+ field: "foo".into(),
+ text: "bar".into(),
+ mode: FieldSearchMode::NoCombining,
})]
);
@@ -829,7 +866,7 @@ mod test {
vec![Search(SingleField {
field: "field".into(),
text: "va\"lue".into(),
- is_re: false
+ mode: FieldSearchMode::Normal,
})]
);
assert_eq!(parse(r#""field:va\"lue""#)?, parse(r#"field:"va\"lue""#)?,);
@@ -906,14 +943,14 @@ mod test {
parse("tag:hard")?,
vec![Search(Tag {
tag: "hard".into(),
- is_re: false
+ mode: FieldSearchMode::Normal
})]
);
assert_eq!(
parse(r"tag:re:\\")?,
vec![Search(Tag {
tag: r"\\".into(),
- is_re: true
+ mode: FieldSearchMode::Regex
})]
);
assert_eq!(
diff --git a/rslib/src/search/service/search_node.rs b/rslib/src/search/service/search_node.rs
index 1851a28f7..6986eef2a 100644
--- a/rslib/src/search/service/search_node.rs
+++ b/rslib/src/search/service/search_node.rs
@@ -6,6 +6,7 @@ use itertools::Itertools;
use crate::prelude::*;
use crate::search::parse_search;
+use crate::search::FieldSearchMode;
use crate::search::Negated;
use crate::search::Node;
use crate::search::PropertyKind;
@@ -40,7 +41,7 @@ impl TryFrom for Node {
Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
field: escape_anki_wildcards_for_search_node(&s),
text: "_*".to_string(),
- is_re: false,
+ mode: FieldSearchMode::Normal,
}),
Filter::Rated(rated) => Node::Search(SearchNode::Rated {
days: rated.days,
@@ -107,7 +108,7 @@ impl TryFrom for Node {
Filter::Field(field) => Node::Search(SearchNode::SingleField {
field: escape_anki_wildcards(&field.field_name),
text: escape_anki_wildcards(&field.text),
- is_re: field.is_re,
+ mode: field.mode().into(),
}),
Filter::LiteralText(text) => {
let text = escape_anki_wildcards(&text);
diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs
index 542dba4fc..f6237d6fd 100644
--- a/rslib/src/search/sqlwriter.rs
+++ b/rslib/src/search/sqlwriter.rs
@@ -7,6 +7,7 @@ use std::ops::Range;
use itertools::Itertools;
+use super::parser::FieldSearchMode;
use super::parser::Node;
use super::parser::PropertyKind;
use super::parser::RatingKind;
@@ -138,8 +139,8 @@ impl SqlWriter<'_> {
false,
)?
}
- SearchNode::SingleField { field, text, is_re } => {
- self.write_field(&norm(field), &self.norm_note(text), *is_re)?
+ SearchNode::SingleField { field, text, mode } => {
+ self.write_field(&norm(field), &self.norm_note(text), *mode)?
}
SearchNode::Duplicates { notetype_id, text } => {
self.write_dupe(*notetype_id, &self.norm_note(text))?
@@ -180,7 +181,7 @@ impl SqlWriter<'_> {
SearchNode::Notetype(notetype) => self.write_notetype(&norm(notetype)),
SearchNode::Rated { days, ease } => self.write_rated(">", -i64::from(*days), ease)?,
- SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re),
+ SearchNode::Tag { tag, mode } => self.write_tag(&norm(tag), *mode),
SearchNode::State(state) => self.write_state(state)?,
SearchNode::Flag(flag) => {
write!(self.sql, "(c.flags & 7) == {flag}").unwrap();
@@ -296,8 +297,8 @@ impl SqlWriter<'_> {
Ok(())
}
- fn write_tag(&mut self, tag: &str, is_re: bool) {
- if is_re {
+ fn write_tag(&mut self, tag: &str, mode: FieldSearchMode) {
+ if mode == FieldSearchMode::Regex {
self.args.push(format!("(?i){tag}"));
write!(self.sql, "regexp_tags(?{}, n.tags)", self.args.len()).unwrap();
} else {
@@ -310,8 +311,19 @@ impl SqlWriter<'_> {
}
s if s.contains(' ') => write!(self.sql, "false").unwrap(),
text => {
- write!(self.sql, "n.tags regexp ?").unwrap();
- let re = &to_custom_re(text, r"\S");
+ let text = if mode == FieldSearchMode::Normal {
+ write!(self.sql, "n.tags regexp ?").unwrap();
+ Cow::from(text)
+ } else {
+ write!(
+ self.sql,
+ "coalesce(process_text(n.tags, {}), n.tags) regexp ?",
+ ProcessTextFlags::NoCombining.bits()
+ )
+ .unwrap();
+ without_combining(text)
+ };
+ let re = &to_custom_re(&text, r"\S");
self.args.push(format!("(?i).* {re}(::| ).*"));
}
}
@@ -567,16 +579,18 @@ impl SqlWriter<'_> {
}
}
- fn write_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
+ fn write_field(&mut self, field_name: &str, val: &str, mode: FieldSearchMode) -> Result<()> {
if matches!(field_name, "*" | "_*" | "*_") {
- if is_re {
+ if mode == FieldSearchMode::Regex {
self.write_all_fields_regexp(val);
} else {
self.write_all_fields(val);
}
Ok(())
- } else if is_re {
+ } else if mode == FieldSearchMode::Regex {
self.write_single_field_regexp(field_name, val)
+ } else if mode == FieldSearchMode::NoCombining {
+ self.write_single_field_nc(field_name, val)
} else {
self.write_single_field(field_name, val)
}
@@ -592,6 +606,58 @@ impl SqlWriter<'_> {
write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap();
}
+ fn write_single_field_nc(&mut self, field_name: &str, val: &str) -> Result<()> {
+ let field_indicies_by_notetype = self.num_fields_and_fields_indices_by_notetype(
+ field_name,
+ matches!(val, "*" | "_*" | "*_"),
+ )?;
+ if field_indicies_by_notetype.is_empty() {
+ write!(self.sql, "false").unwrap();
+ return Ok(());
+ }
+
+ let val = to_sql(val);
+ let val = without_combining(&val);
+ self.args.push(val.into());
+ let arg_idx = self.args.len();
+ let field_idx_str = format!("' || ?{arg_idx} || '");
+ let other_idx_str = "%".to_string();
+
+ let notetype_clause = |ctx: &FieldQualifiedSearchContext| -> String {
+ let field_index_clause = |range: &Range| {
+ let f = (0..ctx.total_fields_in_note)
+ .filter_map(|i| {
+ if i as u32 == range.start {
+ Some(&field_idx_str)
+ } else if range.contains(&(i as u32)) {
+ None
+ } else {
+ Some(&other_idx_str)
+ }
+ })
+ .join("\x1f");
+ format!(
+ "coalesce(process_text(n.flds, {}), n.flds) like '{f}' escape '\\'",
+ ProcessTextFlags::NoCombining.bits()
+ )
+ };
+
+ let all_field_clauses = ctx
+ .field_ranges_to_search
+ .iter()
+ .map(field_index_clause)
+ .join(" or ");
+ format!("(n.mid = {mid} and ({all_field_clauses}))", mid = ctx.ntid)
+ };
+ let all_notetype_clauses = field_indicies_by_notetype
+ .iter()
+ .map(notetype_clause)
+ .join(" or ");
+ write!(self.sql, "({all_notetype_clauses})").unwrap();
+
+ Ok(())
+ }
+
fn write_single_field_regexp(&mut self, field_name: &str, val: &str) -> Result<()> {
let field_indicies_by_notetype = self.fields_indices_by_notetype(field_name)?;
if field_indicies_by_notetype.is_empty() {
@@ -1116,6 +1182,20 @@ mod test {
vec!["(?i)te.*st".into()]
)
);
+ // field search with no-combine
+ assert_eq!(
+ s(ctx, "front:nc:frânçais"),
+ (
+ concat!(
+ "(((n.mid = 1581236385344 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+ "(n.mid = 1581236385345 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%\u{1f}%' escape '\\')) or ",
+ "(n.mid = 1581236385346 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+ "(n.mid = 1581236385347 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\'))))"
+ )
+ .into(),
+ vec!["francais".into()]
+ )
+ );
// all field search
assert_eq!(
s(ctx, "*:te*st"),
diff --git a/rslib/src/search/writer.rs b/rslib/src/search/writer.rs
index 3bbe6fd0a..68d05c66d 100644
--- a/rslib/src/search/writer.rs
+++ b/rslib/src/search/writer.rs
@@ -9,6 +9,7 @@ use regex::Regex;
use crate::notetype::NotetypeId as NotetypeIdType;
use crate::prelude::*;
use crate::search::parser::parse;
+use crate::search::parser::FieldSearchMode;
use crate::search::parser::Node;
use crate::search::parser::PropertyKind;
use crate::search::parser::RatingKind;
@@ -69,7 +70,7 @@ fn write_search_node(node: &SearchNode) -> String {
use SearchNode::*;
match node {
UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
- SingleField { field, text, is_re } => write_single_field(field, text, *is_re),
+ SingleField { field, text, mode } => write_single_field(field, text, *mode),
AddedInDays(u) => format!("added:{u}"),
EditedInDays(u) => format!("edited:{u}"),
IntroducedInDays(u) => format!("introduced:{u}"),
@@ -81,7 +82,7 @@ fn write_search_node(node: &SearchNode) -> String {
NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"),
Notetype(s) => maybe_quote(&format!("note:{s}")),
Rated { days, ease } => write_rated(days, ease),
- Tag { tag, is_re } => write_single_field("tag", tag, *is_re),
+ Tag { tag, mode } => write_single_field("tag", tag, *mode),
Duplicates { notetype_id, text } => write_dupe(notetype_id, text),
State(k) => write_state(k),
Flag(u) => format!("flag:{u}"),
@@ -116,14 +117,25 @@ fn needs_quotation(txt: &str) -> bool {
}
/// Also used by tag search, which has the same syntax.
-fn write_single_field(field: &str, text: &str, is_re: bool) -> String {
- let re = if is_re { "re:" } else { "" };
- let text = if !is_re && text.starts_with("re:") {
+fn write_single_field(field: &str, text: &str, mode: FieldSearchMode) -> String {
+ let prefix = match mode {
+ FieldSearchMode::Normal => "",
+ FieldSearchMode::Regex => "re:",
+ FieldSearchMode::NoCombining => "nc:",
+ };
+ let text = if mode == FieldSearchMode::Normal
+ && (text.starts_with("re:") || text.starts_with("nc:"))
+ {
text.replacen(':', "\\:", 1)
} else {
text.to_string()
};
- maybe_quote(&format!("{}:{}{}", field.replace(':', "\\:"), re, &text))
+ maybe_quote(&format!(
+ "{}:{}{}",
+ field.replace(':', "\\:"),
+ prefix,
+ &text
+ ))
}
fn write_template(template: &TemplateKind) -> String {
diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs
index 008977fe9..0dabff5e5 100644
--- a/rslib/src/stats/card.rs
+++ b/rslib/src/stats/card.rs
@@ -76,8 +76,15 @@ impl Collection {
note_id: card.note_id.into(),
deck: deck.human_name(),
added: card.id.as_secs().0,
- first_review: revlog.first().map(|entry| entry.id.as_secs().0),
- latest_review: revlog.last().map(|entry| entry.id.as_secs().0),
+ first_review: revlog
+ .iter()
+ .find(|entry| entry.has_rating())
+ .map(|entry| entry.id.as_secs().0),
+ // last_review_time is not used to ensure cram revlogs are included.
+ latest_review: revlog
+ .iter()
+ .rfind(|entry| entry.has_rating())
+ .map(|entry| entry.id.as_secs().0),
due_date: self.due_date(&card)?,
due_position: self.position(&card),
interval: card.interval,
@@ -220,6 +227,7 @@ fn stats_revlog_entry(
ease: entry.ease_factor,
taken_secs: entry.taken_millis as f32 / 1000.,
memory_state: None,
+ last_interval: entry.last_interval_secs(),
}
}
diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs
index c99fbe06e..6d38278b5 100644
--- a/tools/minilints/src/main.rs
+++ b/tools/minilints/src/main.rs
@@ -255,9 +255,7 @@ fn check_for_unstaged_changes() {
}
fn generate_licences() -> Result {
- if which::which("cargo-license").is_err() {
- Command::run("cargo install cargo-license@0.5.1")?;
- }
+ Command::run("cargo install cargo-license@0.7.0")?;
let output = Command::run_with_output([
"cargo-license",
"--features",
diff --git a/ts/editor/editor-toolbar/ColorPicker.svelte b/ts/editor/editor-toolbar/ColorPicker.svelte
index 746b193a2..f5934441c 100644
--- a/ts/editor/editor-toolbar/ColorPicker.svelte
+++ b/ts/editor/editor-toolbar/ColorPicker.svelte
@@ -4,6 +4,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
-
+ saveCustomColours({})}
+/>
{#if keyCombination}
inputRef.click()} />
diff --git a/ts/editor/editor-toolbar/HighlightColorButton.svelte b/ts/editor/editor-toolbar/HighlightColorButton.svelte
index 865ec5668..f89f7a99a 100644
--- a/ts/editor/editor-toolbar/HighlightColorButton.svelte
+++ b/ts/editor/editor-toolbar/HighlightColorButton.svelte
@@ -19,6 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import ColorPicker from "./ColorPicker.svelte";
import { context as editorToolbarContext } from "./EditorToolbar.svelte";
import WithColorHelper from "./WithColorHelper.svelte";
+ import { saveCustomColours } from "@generated/backend";
export let color: string;
@@ -134,7 +135,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
color = setColor(event);
bridgeCommand(`lastHighlightColor:${color}`);
}}
- on:change={() => setTextColor()}
+ on:change={() => {
+ setTextColor();
+ saveCustomColours({});
+ }}
/>
diff --git a/ts/editor/editor-toolbar/TextColorButton.svelte b/ts/editor/editor-toolbar/TextColorButton.svelte
index 165953180..ce80aae49 100644
--- a/ts/editor/editor-toolbar/TextColorButton.svelte
+++ b/ts/editor/editor-toolbar/TextColorButton.svelte
@@ -22,6 +22,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import ColorPicker from "./ColorPicker.svelte";
import { context as editorToolbarContext } from "./EditorToolbar.svelte";
import WithColorHelper from "./WithColorHelper.svelte";
+ import { saveCustomColours } from "@generated/backend";
export let color: string;
@@ -158,6 +159,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
setTimeout(() => {
setTextColor();
}, 200);
+ saveCustomColours({});
}}
/>
diff --git a/ts/lib/components/HelpModal.svelte b/ts/lib/components/HelpModal.svelte
index cf6292537..7ee425950 100644
--- a/ts/lib/components/HelpModal.svelte
+++ b/ts/lib/components/HelpModal.svelte
@@ -23,6 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let title: string;
export let url: string;
+ export let linkLabel: string | undefined = undefined;
export let startIndex = 0;
export let helpSections: HelpItem[];
export let fsrs = false;
@@ -106,11 +107,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
diff --git a/ts/lib/tslib/help-page.ts b/ts/lib/tslib/help-page.ts
index e3f209c6a..e2b2e3da4 100644
--- a/ts/lib/tslib/help-page.ts
+++ b/ts/lib/tslib/help-page.ts
@@ -27,7 +27,8 @@ export const HelpPage = {
limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top",
dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits",
audio: "https://docs.ankiweb.net/deck-options.html#audio",
- fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs",
+ fsrs: "https://docs.ankiweb.net/deck-options.html#fsrs",
+ desiredRetention: "https://docs.ankiweb.net/deck-options.html#desired-retention",
},
Leeches: {
leeches: "https://docs.ankiweb.net/leeches.html#leeches",
diff --git a/ts/reviewer/images.ts b/ts/reviewer/images.ts
index 05de24158..28c54bebb 100644
--- a/ts/reviewer/images.ts
+++ b/ts/reviewer/images.ts
@@ -10,6 +10,9 @@ export function allImagesLoaded(): Promise {
}
function imageLoaded(img: HTMLImageElement): Promise {
+ if (!img.getAttribute("decoding")) {
+ img.decoding = "async";
+ }
return img.complete
? Promise.resolve()
: new Promise((resolve) => {
diff --git a/ts/routes/deck-options/FsrsOptions.svelte b/ts/routes/deck-options/FsrsOptions.svelte
index a166f2081..48124771f 100644
--- a/ts/routes/deck-options/FsrsOptions.svelte
+++ b/ts/routes/deck-options/FsrsOptions.svelte
@@ -53,6 +53,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let desiredRetentionFocused = false;
let desiredRetentionEverFocused = false;
let optimized = false;
+ const initialParams = [...fsrsParams($config)];
$: if (desiredRetentionFocused) {
desiredRetentionEverFocused = true;
}
@@ -338,6 +339,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
state.save(UpdateDeckConfigsMode.COMPUTE_ALL_PARAMS);
}
+ function showSimulatorModal(modal: Modal) {
+ if (fsrsParams($config).toString() === initialParams.toString()) {
+ modal?.show();
+ } else {
+ alert(tr.deckConfigFsrsSimulateSavePreset());
+ }
+ }
+
let simulatorModal: Modal;
let workloadModal: Modal;
@@ -368,7 +377,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
class="btn btn-primary"
on:click={() => {
simulateFsrsRequest.reviewLimit = 9999;
- workloadModal?.show();
+ showSimulatorModal(workloadModal);
}}
>
{tr.deckConfigFsrsDesiredRetentionHelpMeDecideExperimental()}
@@ -455,7 +464,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
diff --git a/ts/routes/graphs/TrueRetention.svelte b/ts/routes/graphs/TrueRetention.svelte
index 4a9738831..12d17079b 100644
--- a/ts/routes/graphs/TrueRetention.svelte
+++ b/ts/routes/graphs/TrueRetention.svelte
@@ -72,7 +72,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
>
{
modal = e.detail.modal;
diff --git a/ts/routes/image-occlusion/Toolbar.svelte b/ts/routes/image-occlusion/Toolbar.svelte
index 8775de936..b00e42087 100644
--- a/ts/routes/image-occlusion/Toolbar.svelte
+++ b/ts/routes/image-occlusion/Toolbar.svelte
@@ -32,6 +32,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
saveNeededStore,
opacityStateStore,
} from "./store";
+ import { get } from "svelte/store";
import { drawEllipse, drawPolygon, drawRectangle, drawText } from "./tools/index";
import { makeMaskTransparent, SHAPE_MASK_COLOR } from "./tools/lib";
import { enableSelectable, stopDraw } from "./tools/lib";
@@ -55,6 +56,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
onWheelDragX,
} from "./tools/tool-zoom";
import { fillMask } from "./tools/tool-fill";
+ import { getCustomColours, saveCustomColours } from "@generated/backend";
export let canvas;
export let iconSize;
@@ -76,6 +78,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let colourRef: HTMLInputElement | undefined;
const colour = writable(SHAPE_MASK_COLOR);
+ const customColorPickerPalette = writable([]);
+
+ async function loadCustomColours() {
+ customColorPickerPalette.set(
+ (await getCustomColours({})).colours.filter(
+ (hex) => !hex.startsWith("#ffffff"),
+ ),
+ );
+ }
+
function onClick(event: MouseEvent) {
const upperCanvas = document.querySelector(".upper-canvas");
if (event.target == upperCanvas) {
@@ -222,7 +234,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
onMount(() => {
- opacityStateStore.set(maskOpacity);
+ maskOpacity = get(opacityStateStore);
removeHandlers = singleCallback(
on(document, "click", onClick),
on(window, "mousemove", onMousemove),
@@ -233,6 +245,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
on(document, "touchstart", onTouchstart),
on(document, "mousemove", onMousemoveDocument),
);
+ loadCustomColours();
});
onDestroy(() => {
@@ -241,7 +254,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
($colour = e.currentTarget!.value)}
+ on:change={() => saveCustomColours({})}
/>
diff --git a/ts/routes/image-occlusion/mask-editor.ts b/ts/routes/image-occlusion/mask-editor.ts
index 6d4d0d284..41adbe423 100644
--- a/ts/routes/image-occlusion/mask-editor.ts
+++ b/ts/routes/image-occlusion/mask-editor.ts
@@ -8,10 +8,22 @@ import { fabric } from "fabric";
import { get } from "svelte/store";
import { optimumCssSizeForCanvas } from "./canvas-scale";
-import { hideAllGuessOne, notesDataStore, saveNeededStore, tagsWritable, textEditingState } from "./store";
+import {
+ hideAllGuessOne,
+ notesDataStore,
+ opacityStateStore,
+ saveNeededStore,
+ tagsWritable,
+ textEditingState,
+} from "./store";
import Toast from "./Toast.svelte";
import { addShapesToCanvasFromCloze } from "./tools/add-from-cloze";
-import { enableSelectable, makeShapesRemainInCanvas, moveShapeToCanvasBoundaries } from "./tools/lib";
+import {
+ enableSelectable,
+ makeMaskTransparent,
+ makeShapesRemainInCanvas,
+ moveShapeToCanvasBoundaries,
+} from "./tools/lib";
import { modifiedPolygon } from "./tools/tool-polygon";
import { undoStack } from "./tools/tool-undo-redo";
import { enablePinchZoom, onResize, setCanvasSize } from "./tools/tool-zoom";
@@ -83,6 +95,7 @@ export const setupMaskEditorForEdit = async (
window.requestAnimationFrame(() => {
onImageLoaded({ noteId: BigInt(noteId) });
});
+ if (get(opacityStateStore)) { makeMaskTransparent(canvas, true); }
};
return canvas;
diff --git a/yarn.lock b/yarn.lock
index 84bd46e31..761f20972 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -6939,8 +6939,8 @@ __metadata:
linkType: hard
"vite@npm:6":
- version: 6.3.5
- resolution: "vite@npm:6.3.5"
+ version: 6.3.6
+ resolution: "vite@npm:6.3.6"
dependencies:
esbuild: "npm:^0.25.0"
fdir: "npm:^6.4.4"
@@ -6989,7 +6989,7 @@ __metadata:
optional: true
bin:
vite: bin/vite.js
- checksum: 10c0/df70201659085133abffc6b88dcdb8a57ef35f742a01311fc56a4cfcda6a404202860729cc65a2c401a724f6e25f9ab40ce4339ed4946f550541531ced6fe41c
+ checksum: 10c0/add701f1e72596c002275782e38d0389ab400c1be330c93a3009804d62db68097a936ca1c53c3301df3aaacfe5e328eab547060f31ef9c49a277ae50df6ad4fb
languageName: node
linkType: hard