>,
+ _translations_type: std::marker::PhantomData<P>,
}
-fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
- KEYS_BY_MODULE
- .get(module_idx)
- .and_then(|translations| translations.get(translation_idx))
- .cloned()
- .unwrap_or("invalid-module-or-translation-index")
-}
+impl<P: Translations> I18n<P> {
+ fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
+ P::KEYS_BY_MODULE
+ .get(module_idx)
+ .and_then(|translations| translations.get(translation_idx))
+ .cloned()
+ .unwrap_or("invalid-module-or-translation-index")
+ }
+
+ fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
+ langs
+ .iter()
+ .cloned()
+ .map(|lang| {
+ let mut buf = String::new();
+ let lang_name = remapped_lang_name(&lang);
+ if let Some(strings) = P::STRINGS.get(lang_name) {
+ if desired_modules.is_empty() {
+ // empty list, provide all modules
+ for value in strings.values() {
+ buf.push_str(value)
+ }
+ } else {
+ for module_name in desired_modules {
+ if let Some(text) = strings.get(module_name.as_str()) {
+ buf.push_str(text);
+ }
+ }
+ }
+ }
+ buf
+ })
+ .collect()
+ }
+
+ /// This temporarily behaves like the older code; in the future we could
+ /// either access each &str separately, or load them on demand.
+ fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
+ let lang = remapped_lang_name(lang);
+ if let Some(module) = P::STRINGS.get(lang) {
+ let mut text = String::new();
+ for module_text in module.values() {
+ text.push_str(module_text)
+ }
+ Some(text)
+ } else {
+ None
+ }
+ }
-impl I18n {
pub fn template_only() -> Self {
Self::new::<&str>(&[])
}
@@ -225,7 +275,7 @@ impl I18n {
let mut output_langs = vec![];
for lang in input_langs {
// if the language is bundled in the binary
- if let Some(text) = ftl_localized_text(&lang).or_else(|| {
+ if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
// when testing, allow missing translations
if cfg!(test) {
Some(String::new())
@@ -244,7 +294,7 @@ impl I18n {
// add English templates
let template_lang = "en-US".parse().unwrap();
- let template_text = ftl_localized_text(&template_lang).unwrap();
+ let template_text = Self::ftl_localized_text(&template_lang).unwrap();
let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
bundles.push(template_bundle);
output_langs.push(template_lang);
@@ -261,6 +311,7 @@ impl I18n {
bundles,
langs: output_langs,
})),
+ _translations_type: PhantomData,
}
}
@@ -270,7 +321,7 @@ impl I18n {
message_index: usize,
args: FluentArgs,
) -> String {
- let key = get_key(module_index, message_index);
+ let key = Self::get_key(module_index, message_index);
self.translate(key, Some(args)).into()
}
@@ -305,7 +356,7 @@ impl I18n {
/// implementation.
pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
let inner = self.inner.lock().unwrap();
- let resources = get_modules(&inner.langs, desired_modules);
+ let resources = Self::get_modules(&inner.langs, desired_modules);
ResourcesForJavascript {
langs: inner.langs.iter().map(ToString::to_string).collect(),
resources,
@@ -313,47 +364,6 @@ impl I18n {
}
}
-fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
- langs
- .iter()
- .cloned()
- .map(|lang| {
- let mut buf = String::new();
- let lang_name = remapped_lang_name(&lang);
- if let Some(strings) = STRINGS.get(lang_name) {
- if desired_modules.is_empty() {
- // empty list, provide all modules
- for value in strings.values() {
- buf.push_str(value)
- }
- } else {
- for module_name in desired_modules {
- if let Some(text) = strings.get(module_name.as_str()) {
- buf.push_str(text);
- }
- }
- }
- }
- buf
- })
- .collect()
-}
-
-/// This temporarily behaves like the older code; in the future we could either
-/// access each &str separately, or load them on demand.
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
- let lang = remapped_lang_name(lang);
- if let Some(module) = STRINGS.get(lang) {
- let mut text = String::new();
- for module_text in module.values() {
- text.push_str(module_text)
- }
- Some(text)
- } else {
- None
- }
-}
-
struct I18nInner {
// bundles in preferred language order, with template English as the
// last element
@@ -490,7 +500,7 @@ mod test {
#[test]
fn i18n() {
// English template
- let tr = I18n::new(&["zz"]);
+ let tr = I18n::::new(&["zz"]);
assert_eq!(tr.translate("valid-key", None), "a valid key");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -513,7 +523,7 @@ mod test {
);
// Another language
- let tr = I18n::new(&["ja_JP"]);
+ let tr = I18n::::new(&["ja_JP"]);
assert_eq!(tr.translate("valid-key", None), "キー");
assert_eq!(tr.translate("only-in-english", None), "not translated");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -524,7 +534,7 @@ mod test {
);
// Decimal separator
- let tr = I18n::new(&["pl-PL"]);
+ let tr = I18n::::new(&["pl-PL"]);
// Polish will use a comma if the string is translated
assert_eq!(
tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),
diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs
index 36af62eeb..db31be2b7 100644
--- a/rslib/i18n/write_strings.rs
+++ b/rslib/i18n/write_strings.rs
@@ -15,7 +15,7 @@ use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;
-pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
+pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
let mut buf = String::new();
// lang->module map
@@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
// ordered list of translations by module
write_translation_key_index(modules, &mut buf);
// methods to generate messages
- write_methods(modules, &mut buf);
+ write_methods(modules, &mut buf, tag);
let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
- let path = dir.join("strings.rs");
+ let path = dir.join(out_fn);
fs::write(path, buf).unwrap();
}
-fn write_methods(modules: &[Module], buf: &mut String) {
+fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
buf.push_str(
r#"
-use crate::{I18n,Number};
+#[allow(unused_imports)]
+use crate::{I18n,Number,Translations};
+#[allow(unused_imports)]
use fluent::{FluentValue, FluentArgs};
use std::borrow::Cow;
-impl I18n {
"#,
);
+ writeln!(buf, "impl I18n<{tag}> {{").unwrap();
for module in modules {
for translation in &module.translations {
let func = translation.key.to_snake_case();
@@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
writeln!(
buf,
- "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
+ "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
count = modules.len(),
)
.unwrap();
@@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
buf.push_str(
"
-pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
+pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
);
@@ -195,12 +197,30 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
.unwrap();
for (module, contents) in modules {
- writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap();
+ let escaped_contents = escape_unicode_control_chars(contents);
+ writeln!(
+ buf,
+ r###" "{module}" => r##"{escaped_contents}"##,"###
+ )
+ .unwrap();
}
buf.push_str("};\n");
}
+fn escape_unicode_control_chars(input: &str) -> String {
+ use regex::Regex;
+
+ static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
+ let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
+
+ re.replace_all(input, |caps: &regex::Captures| {
+ let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
+ format!("\\u{{{:04x}}}", c as u32)
+ })
+ .into_owned()
+}
+
fn lang_constant_name(lang: &str) -> String {
lang.to_ascii_uppercase().replace('-', "_")
}
diff --git a/rslib/linkchecker/tests/links.rs b/rslib/linkchecker/tests/links.rs
index 2f39fbe31..39201de78 100644
--- a/rslib/linkchecker/tests/links.rs
+++ b/rslib/linkchecker/tests/links.rs
@@ -42,14 +42,14 @@ enum CheckableUrl {
}
impl CheckableUrl {
- fn url(&self) -> Cow<str> {
+ fn url(&self) -> Cow<'_, str> {
match *self {
Self::HelpPage(page) => help_page_to_link(page).into(),
Self::String(s) => s.into(),
}
}
- fn anchor(&self) -> Cow<str> {
+ fn anchor(&self) -> Cow<'_, str> {
match *self {
Self::HelpPage(page) => help_page_link_suffix(page).into(),
Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
diff --git a/rslib/proto/python.rs b/rslib/proto/python.rs
index a5adb4179..5c245de1d 100644
--- a/rslib/proto/python.rs
+++ b/rslib/proto/python.rs
@@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()>
write_header(&mut out)?;
for service in services {
- if service.name == "BackendAnkidroidService" {
+ if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) {
continue;
}
for method in service.all_methods() {
diff --git a/rslib/src/backend/collection.rs b/rslib/src/backend/collection.rs
index d9f7c6262..5cef74381 100644
--- a/rslib/src/backend/collection.rs
+++ b/rslib/src/backend/collection.rs
@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
}
impl Backend {
- pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+ pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
let guard = self.col.lock().unwrap();
guard
.is_some()
@@ -102,7 +102,7 @@ impl Backend {
.ok_or(AnkiError::CollectionNotOpen)
}
- pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+ pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
let guard = self.col.lock().unwrap();
guard
.is_none()
diff --git a/rslib/src/card_rendering/mod.rs b/rslib/src/card_rendering/mod.rs
index 3d61a4fe5..262f2a7c9 100644
--- a/rslib/src/card_rendering/mod.rs
+++ b/rslib/src/card_rendering/mod.rs
@@ -34,7 +34,7 @@ pub fn prettify_av_tags + AsRef>(txt: S) -> String {
/// Parse `txt` into [CardNodes] and return the result,
/// or [None] if it only contains text nodes.
-fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
+fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
let nodes = CardNodes::parse(txt);
(!nodes.text_only).then_some(nodes)
}
diff --git a/rslib/src/card_rendering/parser.rs b/rslib/src/card_rendering/parser.rs
index b124c069d..0ee66a9b1 100644
--- a/rslib/src/card_rendering/parser.rs
+++ b/rslib/src/card_rendering/parser.rs
@@ -103,13 +103,13 @@ fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
move |s| alt((is_not(arr), success(""))).parse(s)
}
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node<'_>> {
alt((sound_node, tag_node, text_node)).parse(s)
}
/// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
/// video file.
-fn sound_node(s: &str) -> IResult<Node> {
+fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
map(
delimited(tag("[sound:"), is_not("]"), tag("]")),
Node::SoundOrVideo,
@@ -117,7 +117,7 @@ fn sound_node(s: &str) -> IResult {
.parse(s)
}
-fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
+fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
// first char could be '[', but wasn't part of a node, so skip (eof ends parse)
let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
Ok(match after.find('[') {
@@ -127,9 +127,9 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
}
/// An Anki tag `[anki:tag...]...[/anki:tag]`.
-fn tag_node(s: &str) -> IResult<Node> {
+fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
/// Match the start of an opening tag and return its name.
- fn name(s: &str) -> IResult<&str> {
+ fn name(s: &str) -> IResult<'_, &str> {
preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
}
@@ -139,12 +139,12 @@ fn tag_node(s: &str) -> IResult {
) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
/// List of whitespace-separated `key=val` tuples, where `val` may be
/// empty.
- fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
- fn key(s: &str) -> IResult<&str> {
+ fn options(s: &str) -> IResult<'_, Vec<(&str, &str)>> {
+ fn key(s: &str) -> IResult<'_, &str> {
is_not("] \t\r\n=").parse(s)
}
- fn val(s: &str) -> IResult<&str> {
+ fn val(s: &str) -> IResult<'_, &str> {
alt((
delimited(tag("\""), is_not0("\""), tag("\"")),
is_not0("] \t\r\n\""),
@@ -197,7 +197,7 @@ fn tag_node(s: &str) -> IResult {
.parse(s)
}
-fn text_node(s: &str) -> IResult<Node> {
+fn text_node(s: &str) -> IResult<'_, Node<'_>> {
map(take_till_potential_tag_start, Node::Text).parse(s)
}
diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs
index 02919dc12..70a5d1703 100644
--- a/rslib/src/cloze.rs
+++ b/rslib/src/cloze.rs
@@ -10,6 +10,7 @@ use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
+use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
@@ -26,7 +27,7 @@ use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;
static CLOZE: LazyLock<Regex> =
- LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
+ LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());
static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
@@ -48,39 +49,42 @@ mod mathjax_caps {
#[derive(Debug)]
enum Token<'a> {
// The parameter is the cloze number as is appears in the field content.
- OpenCloze(u16),
+ OpenCloze(Vec<u16>),
Text(&'a str),
CloseCloze,
}
/// Tokenize string
-fn tokenize(mut text: &str) -> impl Iterator- {
- fn open_cloze(text: &str) -> IResult<&str, Token> {
+fn tokenize(mut text: &str) -> impl Iterator
- > {
+ fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
// opening brackets and 'c'
let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
- // following number
- let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
- let digits: u16 = match digits.parse() {
- Ok(digits) => digits,
- Err(_) => {
- // not a valid number; fail to recognize
- return Err(nom::Err::Error(nom::error::make_error(
- text,
- nom::error::ErrorKind::Digit,
- )));
- }
- };
+ // following comma-separated numbers
+ let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
+ let ordinals: Vec<u16> = ordinals
+ .split(',')
+ .filter_map(|s| s.parse().ok())
+ .collect::<HashSet<_>>() // deduplicate
+ .into_iter()
+ .sorted() // set conversion can de-order
+ .collect();
+ if ordinals.is_empty() {
+ return Err(nom::Err::Error(nom::error::make_error(
+ text,
+ nom::error::ErrorKind::Digit,
+ )));
+ }
// ::
let (text, _colons) = tag("::")(text)?;
- Ok((text, Token::OpenCloze(digits)))
+ Ok((text, Token::OpenCloze(ordinals)))
}
- fn close_cloze(text: &str) -> IResult<&str, Token> {
+ fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
map(tag("}}"), |_| Token::CloseCloze).parse(text)
}
/// Match a run of text until an open/close marker is encountered.
- fn normal_text(text: &str) -> IResult<&str, Token> {
+ fn normal_text(text: &str) -> IResult<&str, Token<'_>> {
if text.is_empty() {
return Err(nom::Err::Error(nom::error::make_error(
text,
@@ -121,18 +125,27 @@ enum TextOrCloze<'a> {
#[derive(Debug)]
struct ExtractedCloze<'a> {
// `ordinal` is the cloze number as is appears in the field content.
- ordinal: u16,
+ ordinals: Vec<u16>,
nodes: Vec<TextOrCloze<'a>>,
hint: Option<&'a str>,
}
+/// Generate a string representation of the ordinals for HTML
+fn ordinals_str(ordinals: &[u16]) -> String {
+ ordinals
+ .iter()
+ .map(|o| o.to_string())
+ .collect::<Vec<_>>()
+ .join(",")
+}
+
impl ExtractedCloze<'_> {
/// Return the cloze's hint, or "..." if none was provided.
fn hint(&self) -> &str {
self.hint.unwrap_or("...")
}
- fn clozed_text(&self) -> Cow<str> {
+ fn clozed_text(&self) -> Cow<'_, str> {
// happy efficient path?
if self.nodes.len() == 1 {
if let TextOrCloze::Text(text) = self.nodes.last().unwrap() {
@@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
buf.into()
}
+ /// Checks if this cloze is active for a given ordinal
+ fn contains_ordinal(&self, ordinal: u16) -> bool {
+ self.ordinals.contains(&ordinal)
+ }
+
/// If cloze starts with image-occlusion:, return the text following that.
fn image_occlusion(&self) -> Option<&str> {
let TextOrCloze::Text(text) = self.nodes.first()? else {
@@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec> {
let mut output = vec![];
for token in tokenize(text) {
match token {
- Token::OpenCloze(ordinal) => {
+ Token::OpenCloze(ordinals) => {
if open_clozes.len() < 10 {
open_clozes.push(ExtractedCloze {
- ordinal,
+ ordinals,
nodes: Vec::with_capacity(1), // common case
hint: None,
})
@@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
output: &mut Vec<String>,
) {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal == cloze_ord {
+ if cloze.contains_ordinal(cloze_ord) {
if question {
output.push(cloze.hint().into())
} else {
@@ -234,14 +252,15 @@ fn reveal_cloze(
active_cloze_found_in_text: &mut bool,
buf: &mut String,
) {
- let active = cloze.ordinal == cloze_ord;
+ let active = cloze.contains_ordinal(cloze_ord);
*active_cloze_found_in_text |= active;
+
if let Some(image_occlusion_text) = cloze.image_occlusion() {
buf.push_str(&render_image_occlusion(
image_occlusion_text,
question,
active,
- cloze.ordinal,
+ &cloze.ordinals,
));
return;
}
@@ -265,7 +284,7 @@ fn reveal_cloze(
buf,
r#"[{}]"#,
encode_attribute(&content_buf),
- cloze.ordinal,
+ ordinals_str(&cloze.ordinals),
cloze.hint()
)
.unwrap();
@@ -274,7 +293,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -292,7 +311,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -308,23 +327,28 @@ fn reveal_cloze(
}
}
-fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
- if (question_side && active) || ordinal == 0 {
+fn render_image_occlusion(
+ text: &str,
+ question_side: bool,
+ active: bool,
+ ordinals: &[u16],
+) -> String {
+ if (question_side && active) || ordinals.contains(&0) {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !question_side && active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else {
@@ -338,7 +362,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.image_occlusion().is_some() {
if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
- occlusions.entry(cloze.ordinal).or_default().push(shape);
+ // Associate this occlusion with all ordinals in this cloze
+ for &ordinal in &cloze.ordinals {
+ occlusions.entry(ordinal).or_default().push(shape.clone());
+ }
}
}
}
@@ -353,7 +380,7 @@ pub fn parse_image_occlusions(text: &str) -> Vec {
.collect()
}
-pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
let mut buf = String::new();
let mut active_cloze_found_in_text = false;
for node in &parse_text_with_clozes(text) {
@@ -376,7 +403,7 @@ pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow
}
}
-pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
let mut output = Vec::new();
for node in &parse_text_with_clozes(text) {
reveal_cloze_text_in_nodes(node, cloze_ord, question, &mut output);
@@ -384,7 +411,7 @@ pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow
output.join(", ").into()
}
-pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<str> {
+pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<'_, str> {
let mut output = Vec::new();
for node in &parse_text_with_clozes(text) {
reveal_cloze_text_in_nodes(node, cloze_ord, false, &mut output);
@@ -420,7 +447,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
pub(crate) fn contains_cloze(text: &str) -> bool {
parse_text_with_clozes(text)
.iter()
- .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
+ .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
}
/// Returns the set of cloze number as they appear in the fields's content.
@@ -433,10 +460,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet {
fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet<u16>) {
for node in nodes {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal != 0 {
- set.insert(cloze.ordinal);
- add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
+ for &ordinal in &cloze.ordinals {
+ if ordinal != 0 {
+ set.insert(ordinal);
+ }
}
+ add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
}
}
}
@@ -460,7 +489,7 @@ pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
CLOZE.replace_all(text, "$1")
}
-fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
+fn strip_html_inside_mathjax(text: &str) -> Cow<'_, str> {
MATHJAX.replace_all(text, |caps: &Captures| -> String {
format!(
"{}{}{}",
@@ -654,4 +683,160 @@ mod test {
)
);
}
+
+ #[test]
+ fn multi_card_card_generation() {
+ let text = "{{c1,2,3::multi}}";
+ assert_eq!(
+ cloze_number_in_fields(vec![text]),
+ vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_basic() {
+ let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...] word and [...] vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...] word and first vs [...]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_html_attributes() {
+ let text = "{{c1,2,3::multi}}";
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card3_html = reveal_cloze_text(text, 3, true);
+ assert!(card3_html.contains(r#"data-ordinal="1,2,3""#));
+ }
+
+ #[test]
+ fn multi_card_cloze_with_hints() {
+ let text = "{{c1,2::answer::hint}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[hint]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[hint]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "answer"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "answer"
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_edge_cases() {
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,1,2::test}}"),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c0,1,2::test}}"),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,,3::test}}"),
+ vec![1, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_only_filter() {
+ let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first");
+ assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second");
+ }
+
+ #[test]
+ fn multi_card_nested_cloze() {
+ let text = "{{c1,2::outer {{c3::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 3, true)).as_ref(),
+ "outer [...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn nested_parent_child_card_same_cloze() {
+ let text = "{{c1::outer {{c1::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_image_occlusion() {
+ let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
+
+ let occlusions = parse_image_occlusions(text);
+ assert_eq!(occlusions.len(), 2);
+ assert!(occlusions.iter().any(|o| o.ordinal == 1));
+ assert!(occlusions.iter().any(|o| o.ordinal == 2));
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2""#));
+ }
}
diff --git a/rslib/src/collection/service.rs b/rslib/src/collection/service.rs
index 2050a6897..a37360782 100644
--- a/rslib/src/collection/service.rs
+++ b/rslib/src/collection/service.rs
@@ -1,8 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use anki_proto::collection::GetCustomColoursResponse;
use anki_proto::generic;
use crate::collection::Collection;
+use crate::config::ConfigKey;
use crate::error;
use crate::prelude::BoolKey;
use crate::prelude::Op;
@@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
})
.map(Into::into)
}
+
+ fn get_custom_colours(
+ &mut self,
+ ) -> error::Result<GetCustomColoursResponse> {
+ let colours = self
+ .get_config_optional(ConfigKey::CustomColorPickerPalette)
+ .unwrap_or_default();
+ Ok(GetCustomColoursResponse { colours })
+ }
}
diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs
index 5ece5b7e1..1e507281a 100644
--- a/rslib/src/config/mod.rs
+++ b/rslib/src/config/mod.rs
@@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
NextNewCardPosition,
#[strum(to_string = "schedVer")]
SchedulerVersion,
+ CustomColorPickerPalette,
}
#[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]
diff --git a/rslib/src/deckconfig/service.rs b/rslib/src/deckconfig/service.rs
index 11c4288d3..b8987982b 100644
--- a/rslib/src/deckconfig/service.rs
+++ b/rslib/src/deckconfig/service.rs
@@ -115,7 +115,7 @@ impl crate::services::DeckConfigService for Collection {
.storage
.get_revlog_entries_for_searched_cards_in_card_order()?;
- let config = guard.col.get_optimal_retention_parameters(revlogs)?;
+ let mut config = guard.col.get_optimal_retention_parameters(revlogs)?;
let cards = guard
.col
.storage
@@ -125,6 +125,8 @@ impl crate::services::DeckConfigService for Collection {
.filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
.collect::<Vec<_>>();
+ config.deck_size = guard.cards;
+
let costs = (70u32..=99u32)
.into_par_iter()
.map(|dr| {
diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs
index 0bd549a20..a9a27753e 100644
--- a/rslib/src/deckconfig/update.rs
+++ b/rslib/src/deckconfig/update.rs
@@ -216,9 +216,6 @@ impl Collection {
for deck in self.storage.get_all_decks()? {
if let Ok(normal) = deck.normal() {
let deck_id = deck.id;
- if let Some(desired_retention) = normal.desired_retention {
- deck_desired_retention.insert(deck_id, desired_retention);
- }
// previous order & params
let previous_config_id = DeckConfigId(normal.config_id);
let previous_config = configs_before_update.get(&previous_config_id);
@@ -226,21 +223,23 @@ impl Collection {
.map(|c| c.inner.new_card_insert_order())
.unwrap_or_default();
let previous_params = previous_config.map(|c| c.fsrs_params());
- let previous_retention = previous_config.map(|c| c.inner.desired_retention);
+ let previous_preset_dr = previous_config.map(|c| c.inner.desired_retention);
+ let previous_deck_dr = normal.desired_retention;
+ let previous_dr = previous_deck_dr.or(previous_preset_dr);
let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
// if a selected (sub)deck, or its old config was removed, update deck to point
// to new config
- let current_config_id = if selected_deck_ids.contains(&deck.id)
+ let (current_config_id, current_deck_dr) = if selected_deck_ids.contains(&deck.id)
|| !configs_after_update.contains_key(&previous_config_id)
{
let mut updated = deck.clone();
updated.normal_mut()?.config_id = selected_config.id.0;
update_deck_limits(updated.normal_mut()?, &req.limits, today);
self.update_deck_inner(&mut updated, deck, usn)?;
- selected_config.id
+ (selected_config.id, updated.normal()?.desired_retention)
} else {
- previous_config_id
+ (previous_config_id, previous_deck_dr)
};
// if new order differs, deck needs re-sorting
@@ -254,11 +253,12 @@ impl Collection {
// if params differ, memory state needs to be recomputed
let current_params = current_config.map(|c| c.fsrs_params());
- let current_retention = current_config.map(|c| c.inner.desired_retention);
+ let current_preset_dr = current_config.map(|c| c.inner.desired_retention);
+ let current_dr = current_deck_dr.or(current_preset_dr);
let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
if fsrs_toggled
|| previous_params != current_params
- || previous_retention != current_retention
+ || previous_dr != current_dr
|| (req.fsrs_reschedule && previous_easy_days != current_easy_days)
{
decks_needing_memory_recompute
@@ -266,7 +266,9 @@ impl Collection {
.or_default()
.push(deck_id);
}
-
+ if let Some(desired_retention) = current_deck_dr {
+ deck_desired_retention.insert(deck_id, desired_retention);
+ }
self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
}
}
diff --git a/rslib/src/decks/name.rs b/rslib/src/decks/name.rs
index 09fd2fe65..c7e79a782 100644
--- a/rslib/src/decks/name.rs
+++ b/rslib/src/decks/name.rs
@@ -191,7 +191,7 @@ fn invalid_char_for_deck_component(c: char) -> bool {
c.is_ascii_control()
}
-fn normalized_deck_name_component(comp: &str) -> Cow<str> {
+fn normalized_deck_name_component(comp: &str) -> Cow<'_, str> {
let mut out = normalize_to_nfc(comp);
if out.contains(invalid_char_for_deck_component) {
out = out.replace(invalid_char_for_deck_component, "").into();
diff --git a/rslib/src/decks/schema11.rs b/rslib/src/decks/schema11.rs
index 5cd4094f0..3d4e30b96 100644
--- a/rslib/src/decks/schema11.rs
+++ b/rslib/src/decks/schema11.rs
@@ -135,6 +135,8 @@ pub struct NormalDeckSchema11 {
review_limit_today: Option,
#[serde(default, deserialize_with = "default_on_invalid")]
new_limit_today: Option,
+ #[serde(default, deserialize_with = "default_on_invalid")]
+ desired_retention: Option<u32>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
@@ -249,6 +251,7 @@ impl Default for NormalDeckSchema11 {
new_limit: None,
review_limit_today: None,
new_limit_today: None,
+ desired_retention: None,
}
}
}
@@ -325,7 +328,7 @@ impl From for NormalDeck {
new_limit: deck.new_limit,
review_limit_today: deck.review_limit_today,
new_limit_today: deck.new_limit_today,
- desired_retention: None,
+ desired_retention: deck.desired_retention.map(|v| v as f32 / 100.0),
}
}
}
@@ -367,6 +370,7 @@ impl From for DeckSchema11 {
new_limit: norm.new_limit,
review_limit_today: norm.review_limit_today,
new_limit_today: norm.new_limit_today,
+ desired_retention: norm.desired_retention.map(|v| (v * 100.0) as u32),
common: deck.into(),
}),
DeckKind::Filtered(ref filt) => DeckSchema11::Filtered(FilteredDeckSchema11 {
@@ -431,7 +435,8 @@ static RESERVED_DECK_KEYS: Set<&'static str> = phf_set! {
"browserCollapsed",
"extendRev",
"id",
- "collapsed"
+ "collapsed",
+ "desiredRetention",
};
impl From<&Deck> for DeckTodaySchema11 {
diff --git a/rslib/src/import_export/gather.rs b/rslib/src/import_export/gather.rs
index 99e4babe2..7249e134a 100644
--- a/rslib/src/import_export/gather.rs
+++ b/rslib/src/import_export/gather.rs
@@ -231,7 +231,10 @@ fn svg_getter(notetypes: &[Notetype]) -> impl Fn(NotetypeId) -> bool {
}
impl Collection {
- fn gather_notes(&mut self, search: impl TryIntoSearch) -> Result<(Vec<Note>, NoteTableGuard)> {
+ fn gather_notes(
+ &mut self,
+ search: impl TryIntoSearch,
+ ) -> Result<(Vec<Note>, NoteTableGuard<'_>)> {
let guard = self.search_notes_into_table(search)?;
guard
.col
@@ -240,7 +243,7 @@ impl Collection {
.map(|notes| (notes, guard))
}
- fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard)> {
+ fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard<'_>)> {
let guard = self.search_cards_of_notes_into_table()?;
guard
.col
diff --git a/rslib/src/import_export/package/apkg/import/notes.rs b/rslib/src/import_export/package/apkg/import/notes.rs
index ba5178a18..ce4266289 100644
--- a/rslib/src/import_export/package/apkg/import/notes.rs
+++ b/rslib/src/import_export/package/apkg/import/notes.rs
@@ -664,7 +664,7 @@ mod test {
self
}
- fn import(self, col: &mut Collection) -> NoteContext {
+ fn import(self, col: &mut Collection) -> NoteContext<'_> {
let mut progress_handler = col.new_progress_handler();
let media_map = Box::leak(Box::new(self.media_map));
let mut ctx = NoteContext::new(
diff --git a/rslib/src/import_export/package/media.rs b/rslib/src/import_export/package/media.rs
index ff5bdf4d7..8a7e5b726 100644
--- a/rslib/src/import_export/package/media.rs
+++ b/rslib/src/import_export/package/media.rs
@@ -154,7 +154,7 @@ pub(super) fn extract_media_entries(
}
}
-pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<str>> {
+pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<'_, str>> {
if !filename_is_safe(name) {
Err(AnkiError::ImportError {
source: ImportError::Corrupt,
diff --git a/rslib/src/import_export/text/csv/export.rs b/rslib/src/import_export/text/csv/export.rs
index 885035b7e..1af1cfaa9 100644
--- a/rslib/src/import_export/text/csv/export.rs
+++ b/rslib/src/import_export/text/csv/export.rs
@@ -147,7 +147,7 @@ fn rendered_nodes_to_str(nodes: &[RenderedNode]) -> String {
.join("")
}
-fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
+fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
let mut text = strip_redundant_sections(field);
if !with_html {
text = text.map_cow(|t| html_to_text_line(t, false));
@@ -155,7 +155,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow {
text
}
-fn strip_redundant_sections(text: &str) -> Cow<str> {
+fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
static RE: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?isx)
@@ -169,7 +169,7 @@ fn strip_redundant_sections(text: &str) -> Cow {
RE.replace_all(text.as_ref(), "")
}
-fn strip_answer_side_question(text: &str) -> Cow<str> {
+fn strip_answer_side_question(text: &str) -> Cow<'_, str> {
static RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
RE.replace_all(text.as_ref(), "")
@@ -251,7 +251,7 @@ impl NoteContext {
.chain(self.tags(note))
}
- fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+ fn notetype_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
self.with_notetype.then(|| {
self.notetypes
.get(¬e.notetype_id)
@@ -259,7 +259,7 @@ impl NoteContext {
})
}
- fn deck_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+ fn deck_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
self.with_deck.then(|| {
self.deck_ids
.get(¬e.id)
@@ -268,7 +268,7 @@ impl NoteContext {
})
}
- fn tags(&self, note: &Note) -> Option<Cow<[u8]>> {
+ fn tags(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
self.with_tags
.then(|| Cow::from(note.tags.join(" ").into_bytes()))
}
diff --git a/rslib/src/import_export/text/import.rs b/rslib/src/import_export/text/import.rs
index 202189eb6..1e6f85b3f 100644
--- a/rslib/src/import_export/text/import.rs
+++ b/rslib/src/import_export/text/import.rs
@@ -511,7 +511,7 @@ impl NoteContext<'_> {
}
impl Note {
- fn first_field_stripped(&self) -> Cow<str> {
+ fn first_field_stripped(&self) -> Cow<'_, str> {
strip_html_preserving_media_filenames(&self.fields()[0])
}
}
@@ -623,7 +623,7 @@ impl ForeignNote {
.all(|(opt, field)| opt.as_ref().map(|s| s == field).unwrap_or(true))
}
- fn first_field_stripped(&self) -> Option<Cow<str>> {
+ fn first_field_stripped(&self) -> Option<Cow<'_, str>> {
self.fields
.first()
.and_then(|s| s.as_ref())
diff --git a/rslib/src/latex.rs b/rslib/src/latex.rs
index e5cb002ac..02056b721 100644
--- a/rslib/src/latex.rs
+++ b/rslib/src/latex.rs
@@ -48,7 +48,7 @@ pub struct ExtractedLatex {
pub(crate) fn extract_latex_expanding_clozes(
text: &str,
svg: bool,
-) -> (Cow<str>, Vec<ExtractedLatex>) {
+) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
if text.contains("{{c") {
let expanded = expand_clozes_to_reveal_latex(text);
let (text, extracts) = extract_latex(&expanded, svg);
@@ -60,7 +60,7 @@ pub(crate) fn extract_latex_expanding_clozes(
/// Extract LaTeX from the provided text.
/// Expects cloze deletions to already be expanded.
-pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
+pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
let mut extracted = vec![];
let new_text = LATEX.replace_all(text, |caps: &Captures| {
@@ -84,7 +84,7 @@ pub fn extract_latex(text: &str, svg: bool) -> (Cow, Vec) {
(new_text, extracted)
}
-fn strip_html_for_latex(html: &str) -> Cow<str> {
+fn strip_html_for_latex(html: &str) -> Cow<'_, str> {
let mut out: Cow<str> = html.into();
if let Cow::Owned(o) = LATEX_NEWLINES.replace_all(html, "\n") {
out = o.into();
diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs
index 6974e2f81..ce17b40bb 100644
--- a/rslib/src/media/files.rs
+++ b/rslib/src/media/files.rs
@@ -91,7 +91,7 @@ fn nonbreaking_space(char: char) -> bool {
/// - Any problem characters are removed.
/// - Windows device names like CON and PRN have '_' appended
/// - The filename is limited to 120 bytes.
-pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
+pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
let mut output = Cow::Borrowed(fname);
if !is_nfc(output.as_ref()) {
@@ -102,7 +102,7 @@ pub(crate) fn normalize_filename(fname: &str) -> Cow {
}
/// See normalize_filename(). This function expects NFC-normalized input.
-pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
+pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
if fname.contains(disallowed_char) {
fname = fname.replace(disallowed_char, "").into()
}
@@ -137,7 +137,7 @@ pub(crate) fn normalize_nfc_filename(mut fname: Cow) -> Cow {
/// but can be accessed as NFC. On these devices, if the filename
/// is otherwise valid, the filename is returned as NFC.
#[allow(clippy::collapsible_else_if)]
-pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<str>> {
+pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<'_, str>> {
if cfg!(target_vendor = "apple") {
if !is_nfc(fname) {
let as_nfc = fname.chars().nfc().collect::<String>();
@@ -208,7 +208,7 @@ pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> Stri
}
/// If filename is longer than max_bytes, truncate it.
-fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
+fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<'_, str> {
if fname.len() <= max_bytes {
return Cow::Borrowed(fname);
}
diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs
index 932022e99..2b5ea2921 100644
--- a/rslib/src/notes/mod.rs
+++ b/rslib/src/notes/mod.rs
@@ -87,7 +87,7 @@ impl TryFrom for AddNoteRequest {
}
impl Collection {
- pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<()>> {
+ pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<usize>> {
self.transact(Op::AddNote, |col| col.add_note_inner(note, did))
}
@@ -372,7 +372,7 @@ impl Collection {
Ok(())
}
- pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<()> {
+ pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<usize> {
let nt = self
.get_notetype(note.notetype_id)?
.or_invalid("missing note type")?;
@@ -383,10 +383,11 @@ impl Collection {
note.prepare_for_update(ctx.notetype, normalize_text)?;
note.set_modified(ctx.usn);
self.add_note_only_undoable(note)?;
- self.generate_cards_for_new_note(&ctx, note, did)?;
+ let count = self.generate_cards_for_new_note(&ctx, note, did)?;
self.set_last_deck_for_notetype(note.notetype_id, did)?;
self.set_last_notetype_for_deck(did, note.notetype_id)?;
- self.set_current_notetype_id(note.notetype_id)
+ self.set_current_notetype_id(note.notetype_id)?;
+ Ok(count)
}
pub fn update_note(&mut self, note: &mut Note) -> Result<OpOutput<()>> {
diff --git a/rslib/src/notetype/cardgen.rs b/rslib/src/notetype/cardgen.rs
index 8e03d8ee4..b2a100054 100644
--- a/rslib/src/notetype/cardgen.rs
+++ b/rslib/src/notetype/cardgen.rs
@@ -215,7 +215,7 @@ impl Collection {
ctx: &CardGenContext<impl Deref<Target = Notetype>>,
note: &Note,
target_deck_id: DeckId,
- ) -> Result<()> {
+ ) -> Result<usize> {
self.generate_cards_for_note(
ctx,
note,
@@ -231,7 +231,8 @@ impl Collection {
note: &Note,
) -> Result<()> {
let existing = self.storage.existing_cards_for_note(note.id)?;
- self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())
+ self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())?;
+ Ok(())
}
fn generate_cards_for_note(
@@ -241,12 +242,13 @@ impl Collection {
existing: &[AlreadyGeneratedCardInfo],
target_deck_id: Option<DeckId>,
cache: &mut CardGenCache,
- ) -> Result<()> {
+ ) -> Result<usize> {
let cards = ctx.new_cards_required(note, existing, true);
if cards.is_empty() {
- return Ok(());
+ return Ok(0);
}
- self.add_generated_cards(note.id, &cards, target_deck_id, cache)
+ self.add_generated_cards(note.id, &cards, target_deck_id, cache)?;
+ Ok(cards.len())
}
pub(crate) fn generate_cards_for_notetype(
diff --git a/rslib/src/notetype/render.rs b/rslib/src/notetype/render.rs
index 08c5677b0..19f5208dc 100644
--- a/rslib/src/notetype/render.rs
+++ b/rslib/src/notetype/render.rs
@@ -25,7 +25,7 @@ pub struct RenderCardOutput {
impl RenderCardOutput {
/// The question text. This is only valid to call when partial_render=false.
- pub fn question(&self) -> Cow<str> {
+ pub fn question(&self) -> Cow<'_, str> {
match self.qnodes.as_slice() {
[RenderedNode::Text { text }] => text.into(),
_ => "not fully rendered".into(),
@@ -33,7 +33,7 @@ impl RenderCardOutput {
}
/// The answer text. This is only valid to call when partial_render=false.
- pub fn answer(&self) -> Cow<str> {
+ pub fn answer(&self) -> Cow<'_, str> {
match self.anodes.as_slice() {
[RenderedNode::Text { text }] => text.into(),
_ => "not fully rendered".into(),
diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs
index 9b5df66d5..b27881809 100644
--- a/rslib/src/notetype/stock.rs
+++ b/rslib/src/notetype/stock.rs
@@ -122,7 +122,7 @@ pub(crate) fn basic(tr: &I18n) -> Notetype {
pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
let mut nt = basic(tr);
- nt.config.original_stock_kind = StockKind::BasicTyping as i32;
+ nt.config.original_stock_kind = OriginalStockKind::BasicTyping as i32;
nt.name = tr.notetypes_basic_type_answer_name().into();
let front = tr.notetypes_front_field();
let back = tr.notetypes_back_field();
@@ -138,7 +138,7 @@ pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
let mut nt = basic(tr);
- nt.config.original_stock_kind = StockKind::BasicAndReversed as i32;
+ nt.config.original_stock_kind = OriginalStockKind::BasicAndReversed as i32;
nt.name = tr.notetypes_basic_reversed_name().into();
let front = tr.notetypes_front_field();
let back = tr.notetypes_back_field();
@@ -156,7 +156,7 @@ pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
pub(crate) fn basic_optional_reverse(tr: &I18n) -> Notetype {
let mut nt = basic_forward_reverse(tr);
- nt.config.original_stock_kind = StockKind::BasicOptionalReversed as i32;
+ nt.config.original_stock_kind = OriginalStockKind::BasicOptionalReversed as i32;
nt.name = tr.notetypes_basic_optional_reversed_name().into();
let addrev = tr.notetypes_add_reverse_field();
nt.add_field(addrev.as_ref());
diff --git a/rslib/src/revlog/mod.rs b/rslib/src/revlog/mod.rs
index f52698388..fbb9b459a 100644
--- a/rslib/src/revlog/mod.rs
+++ b/rslib/src/revlog/mod.rs
@@ -85,6 +85,15 @@ impl RevlogEntry {
.unwrap()
}
+ pub(crate) fn last_interval_secs(&self) -> u32 {
+ u32::try_from(if self.last_interval > 0 {
+ self.last_interval.saturating_mul(86_400)
+ } else {
+ self.last_interval.saturating_mul(-1)
+ })
+ .unwrap()
+ }
+
/// Returns true if this entry represents a reset operation.
/// These entries are created when a card is reset using
/// [`Collection::reschedule_cards_as_new`].
diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs
index 6ff8c6e2d..a71c6330f 100644
--- a/rslib/src/scheduler/answering/mod.rs
+++ b/rslib/src/scheduler/answering/mod.rs
@@ -443,9 +443,20 @@ impl Collection {
.storage
.get_deck(card.deck_id)?
.or_not_found(card.deck_id)?;
- let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
+ let home_deck = if card.original_deck_id.0 == 0 {
+ &deck
+ } else {
+ &self
+ .storage
+ .get_deck(card.original_deck_id)?
+ .or_not_found(card.original_deck_id)?
+ };
+ let config = self
+ .storage
+ .get_deck_config(home_deck.config_id().or_invalid("home deck is filtered")?)?
+ .unwrap_or_default();
- let desired_retention = deck.effective_desired_retention(&config);
+ let desired_retention = home_deck.effective_desired_retention(&config);
let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
let fsrs_next_states = if fsrs_enabled {
let params = config.fsrs_params();
diff --git a/rslib/src/scheduler/fsrs/error.rs b/rslib/src/scheduler/fsrs/error.rs
index d5b596a36..404ee3605 100644
--- a/rslib/src/scheduler/fsrs/error.rs
+++ b/rslib/src/scheduler/fsrs/error.rs
@@ -13,13 +13,7 @@ impl From for AnkiError {
FSRSError::OptimalNotFound => AnkiError::FsrsUnableToDetermineDesiredRetention,
FSRSError::Interrupted => AnkiError::Interrupted,
FSRSError::InvalidParameters => AnkiError::FsrsParamsInvalid,
- FSRSError::InvalidInput => AnkiError::InvalidInput {
- source: InvalidInputError {
- message: "invalid params provided".to_string(),
- source: None,
- backtrace: None,
- },
- },
+ FSRSError::InvalidInput => AnkiError::FsrsParamsInvalid,
FSRSError::InvalidDeckSize => AnkiError::InvalidInput {
source: InvalidInputError {
message: "no cards to simulate".to_string(),
diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs
index 420ead5a3..303bbfd91 100644
--- a/rslib/src/scheduler/fsrs/memory_state.rs
+++ b/rslib/src/scheduler/fsrs/memory_state.rs
@@ -136,6 +136,19 @@ impl Collection {
let deckconfig_id = deck.config_id().unwrap();
// reschedule it
let original_interval = card.interval;
+ let min_interval = |interval: u32| {
+ let previous_interval =
+ last_info.previous_interval.unwrap_or(0);
+ if interval > previous_interval {
+ // interval grew; don't allow fuzzed interval to
+ // be less than previous+1
+ previous_interval + 1
+ } else {
+ // interval shrunk; don't restrict negative fuzz
+ 0
+ }
+ .max(1)
+ };
let interval = fsrs.next_interval(
Some(state.stability),
desired_retention,
@@ -146,7 +159,7 @@ impl Collection {
.and_then(|r| {
r.find_interval(
interval,
- 1,
+ min_interval(interval as u32),
req.max_interval,
days_elapsed as u32,
deckconfig_id,
@@ -157,7 +170,7 @@ impl Collection {
with_review_fuzz(
card.get_fuzz_factor(true),
interval,
- 1,
+ min_interval(interval as u32),
req.max_interval,
)
});
@@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo {
/// reviewed the card and now, so that we can determine an accurate period
/// when the card has subsequently been rescheduled to a different day.
pub(crate) last_reviewed_at: Option<TimestampSecs>,
+ /// The interval before the latest review. Used to prevent fuzz from going
+ /// backwards when rescheduling the card
+ pub(crate) previous_interval: Option<u32>,
}
/// Return a map of cards to info about last review.
@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
.for_each(|(card_id, group)| {
let mut last_reviewed_at = None;
+ let mut previous_interval = None;
for e in group.into_iter() {
if e.has_rating_and_affects_scheduling() {
last_reviewed_at = Some(e.id.as_secs());
+ previous_interval = if e.last_interval >= 0 && e.button_chosen > 1 {
+ Some(e.last_interval as u32)
+ } else {
+ None
+ };
} else if e.is_reset() {
last_reviewed_at = None;
+ previous_interval = None;
}
}
- out.insert(card_id, LastRevlogInfo { last_reviewed_at });
+ out.insert(
+ card_id,
+ LastRevlogInfo {
+ last_reviewed_at,
+ previous_interval,
+ },
+ );
});
out
}
diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs
index 726870fe1..1fb1d58b8 100644
--- a/rslib/src/scheduler/fsrs/params.rs
+++ b/rslib/src/scheduler/fsrs/params.rs
@@ -174,7 +174,7 @@ impl Collection {
}
}
- let health_check_passed = if health_check {
+ let health_check_passed = if health_check && input.train_set.len() > 300 {
let fsrs = FSRS::new(None)?;
fsrs.evaluate_with_time_series_splits(input, |_| true)
.ok()
@@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs(
}))
.collect_vec();
- let skip = if training { 1 } else { 0 };
- // Convert the remaining entries into separate FSRSItems, where each item
- // contains all reviews done until then.
- let items: Vec<(RevlogId, FSRSItem)> = entries
- .iter()
- .enumerate()
- .skip(skip)
- .map(|(outer_idx, entry)| {
- let reviews = entries
- .iter()
- .take(outer_idx + 1)
- .enumerate()
- .map(|(inner_idx, r)| FSRSReview {
- rating: r.button_chosen as u32,
- delta_t: delta_ts[inner_idx],
- })
- .collect();
- (entry.id, FSRSItem { reviews })
- })
- .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0)
- .collect_vec();
+ let items = if training {
+ // Convert the remaining entries into separate FSRSItems, where each item
+ // contains all reviews done until then.
+ let mut items = Vec::with_capacity(entries.len());
+ let mut current_reviews = Vec::with_capacity(entries.len());
+ for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() {
+ current_reviews.push(FSRSReview {
+ rating: entry.button_chosen as u32,
+ delta_t,
+ });
+ if idx >= 1 && delta_t > 0 {
+ items.push((
+ entry.id,
+ FSRSItem {
+ reviews: current_reviews.clone(),
+ },
+ ));
+ }
+ }
+ items
+ } else {
+ // When not training, we only need the final FSRS item, which represents
+ // the complete history of the card. This avoids expensive clones in a loop.
+ let reviews = entries
+ .iter()
+ .zip(delta_ts.iter())
+ .map(|(entry, &delta_t)| FSRSReview {
+ rating: entry.button_chosen as u32,
+ delta_t,
+ })
+ .collect();
+ let last_entry = entries.last().unwrap();
+
+ vec![(last_entry.id, FSRSItem { reviews })]
+ };
+
if items.is_empty() {
None
} else {
@@ -738,7 +753,7 @@ pub(crate) mod tests {
],
false,
),
- fsrs_items!([review(0)], [review(0), review(1)])
+ fsrs_items!([review(0), review(1)])
);
}
@@ -809,7 +824,7 @@ pub(crate) mod tests {
// R | A X R
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9)),
- fsrs_items!([review(0)], [review(0), review(2)])
+ fsrs_items!([review(0), review(2)])
);
}
@@ -828,6 +843,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
+ .last()
+ .unwrap()
+ .reviews
.len(),
2
);
@@ -849,6 +867,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
+ .last()
+ .unwrap()
+ .reviews
.len(),
2
);
diff --git a/rslib/src/scheduler/fsrs/rescheduler.rs b/rslib/src/scheduler/fsrs/rescheduler.rs
index db490b3e4..37c824230 100644
--- a/rslib/src/scheduler/fsrs/rescheduler.rs
+++ b/rslib/src/scheduler/fsrs/rescheduler.rs
@@ -115,13 +115,14 @@ impl Rescheduler {
pub fn find_interval(
&self,
interval: f32,
- minimum: u32,
- maximum: u32,
+ minimum_interval: u32,
+ maximum_interval: u32,
days_elapsed: u32,
deckconfig_id: DeckConfigId,
fuzz_seed: Option<u64>,
) -> Option<u32> {
- let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum);
+ let (before_days, after_days) =
+ constrained_fuzz_bounds(interval, minimum_interval, maximum_interval);
// Don't reschedule the card when it's overdue
if after_days < days_elapsed {
diff --git a/rslib/src/scheduler/fsrs/simulator.rs b/rslib/src/scheduler/fsrs/simulator.rs
index 62130c4d0..58c5fd5a0 100644
--- a/rslib/src/scheduler/fsrs/simulator.rs
+++ b/rslib/src/scheduler/fsrs/simulator.rs
@@ -97,7 +97,7 @@ fn create_review_priority_fn(
// Interval-based ordering
IntervalsAscending => wrap!(|c, _w| c.interval as i32),
- IntervalsDescending => wrap!(|c, _w| -(c.interval as i32)),
+ IntervalsDescending => wrap!(|c, _w| (c.interval as i32).saturating_neg()),
// Retrievability-based ordering
RetrievabilityAscending => {
wrap!(move |c, w| (c.retrievability(w) * 1000.0) as i32)
diff --git a/rslib/src/scheduler/queue/builder/gathering.rs b/rslib/src/scheduler/queue/builder/gathering.rs
index fb6274de5..293b50dc4 100644
--- a/rslib/src/scheduler/queue/builder/gathering.rs
+++ b/rslib/src/scheduler/queue/builder/gathering.rs
@@ -61,28 +61,26 @@ impl QueueBuilder {
}
fn gather_new_cards(&mut self, col: &mut Collection) -> Result<()> {
+ let salt = Self::knuth_salt(self.context.timing.days_elapsed);
match self.context.sort_options.new_gather_priority {
NewCardGatherPriority::Deck => {
self.gather_new_cards_by_deck(col, NewCardSorting::LowestPosition)
}
- NewCardGatherPriority::DeckThenRandomNotes => self.gather_new_cards_by_deck(
- col,
- NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
- ),
+ NewCardGatherPriority::DeckThenRandomNotes => {
+ self.gather_new_cards_by_deck(col, NewCardSorting::RandomNotes(salt))
+ }
NewCardGatherPriority::LowestPosition => {
self.gather_new_cards_sorted(col, NewCardSorting::LowestPosition)
}
NewCardGatherPriority::HighestPosition => {
self.gather_new_cards_sorted(col, NewCardSorting::HighestPosition)
}
- NewCardGatherPriority::RandomNotes => self.gather_new_cards_sorted(
- col,
- NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
- ),
- NewCardGatherPriority::RandomCards => self.gather_new_cards_sorted(
- col,
- NewCardSorting::RandomCards(self.context.timing.days_elapsed),
- ),
+ NewCardGatherPriority::RandomNotes => {
+ self.gather_new_cards_sorted(col, NewCardSorting::RandomNotes(salt))
+ }
+ NewCardGatherPriority::RandomCards => {
+ self.gather_new_cards_sorted(col, NewCardSorting::RandomCards(salt))
+ }
}
}
@@ -169,4 +167,10 @@ impl QueueBuilder {
true
}
}
+
+ // Generates a salt for use with fnvhash. Useful to increase randomness
+ // when the base salt is a small integer.
+ fn knuth_salt(base_salt: u32) -> u32 {
+ base_salt.wrapping_mul(2654435761)
+ }
}
diff --git a/rslib/src/scheduler/states/load_balancer.rs b/rslib/src/scheduler/states/load_balancer.rs
index 20b6936df..8cb9e6a1c 100644
--- a/rslib/src/scheduler/states/load_balancer.rs
+++ b/rslib/src/scheduler/states/load_balancer.rs
@@ -174,7 +174,7 @@ impl LoadBalancer {
&self,
note_id: Option,
deckconfig_id: DeckConfigId,
- ) -> LoadBalancerContext {
+ ) -> LoadBalancerContext<'_> {
LoadBalancerContext {
load_balancer: self,
note_id,
diff --git a/rslib/src/search/builder.rs b/rslib/src/search/builder.rs
index a76af0560..0c22ff1eb 100644
--- a/rslib/src/search/builder.rs
+++ b/rslib/src/search/builder.rs
@@ -6,6 +6,7 @@ use std::mem;
use itertools::Itertools;
use super::writer::write_nodes;
+use super::FieldSearchMode;
use super::Node;
use super::SearchNode;
use super::StateKind;
@@ -174,7 +175,7 @@ impl SearchNode {
pub fn from_tag_name(name: &str) -> Self {
Self::Tag {
tag: escape_anki_wildcards_for_search_node(name),
- is_re: false,
+ mode: FieldSearchMode::Normal,
}
}
diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs
index d42ea8323..0dd52dbc3 100644
--- a/rslib/src/search/mod.rs
+++ b/rslib/src/search/mod.rs
@@ -13,6 +13,7 @@ pub use builder::JoinSearches;
pub use builder::Negated;
pub use builder::SearchBuilder;
pub use parser::parse as parse_search;
+pub use parser::FieldSearchMode;
pub use parser::Node;
pub use parser::PropertyKind;
pub use parser::RatingKind;
@@ -226,7 +227,7 @@ impl Collection {
&mut self,
search: impl TryIntoSearch,
mode: SortMode,
- ) -> Result<CardTableGuard> {
+ ) -> Result<CardTableGuard<'_>> {
let top_node = search.try_into_search()?;
let writer = SqlWriter::new(self, ReturnItemType::Cards);
let want_order = mode != SortMode::NoOrder;
@@ -299,7 +300,7 @@ impl Collection {
pub(crate) fn search_notes_into_table(
&mut self,
search: impl TryIntoSearch,
- ) -> Result<NoteTableGuard> {
+ ) -> Result<NoteTableGuard<'_>> {
let top_node = search.try_into_search()?;
let writer = SqlWriter::new(self, ReturnItemType::Notes);
let mode = SortMode::NoOrder;
@@ -320,7 +321,7 @@ impl Collection {
/// Place the ids of cards with notes in 'search_nids' into 'search_cids'.
/// Returns number of added cards.
- pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard> {
+ pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard<'_>> {
self.storage.setup_searched_cards_table()?;
let cards = self.storage.search_cards_of_notes_into_table()?;
Ok(CardTableGuard { cards, col: self })
diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs
index ae166ef54..5928bf486 100644
--- a/rslib/src/search/parser.rs
+++ b/rslib/src/search/parser.rs
@@ -3,6 +3,7 @@
use std::sync::LazyLock;
+use anki_proto::search::search_node::FieldSearchMode as FieldSearchModeProto;
use nom::branch::alt;
use nom::bytes::complete::escaped;
use nom::bytes::complete::is_not;
@@ -27,7 +28,6 @@ use crate::error::ParseError;
use crate::error::Result;
use crate::error::SearchErrorKind as FailKind;
use crate::prelude::*;
-
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
@@ -48,6 +48,23 @@ pub enum Node {
Search(SearchNode),
}
+#[derive(Copy, Debug, PartialEq, Eq, Clone)]
+pub enum FieldSearchMode {
+ Normal,
+ Regex,
+ NoCombining,
+}
+
+impl From<FieldSearchModeProto> for FieldSearchMode {
+ fn from(mode: FieldSearchModeProto) -> Self {
+ match mode {
+ FieldSearchModeProto::Normal => Self::Normal,
+ FieldSearchModeProto::Regex => Self::Regex,
+ FieldSearchModeProto::Nocombining => Self::NoCombining,
+ }
+ }
+}
+
#[derive(Debug, PartialEq, Clone)]
pub enum SearchNode {
// text without a colon
@@ -56,7 +73,7 @@ pub enum SearchNode {
SingleField {
field: String,
text: String,
- is_re: bool,
+ mode: FieldSearchMode,
},
AddedInDays(u32),
EditedInDays(u32),
@@ -77,7 +94,7 @@ pub enum SearchNode {
},
Tag {
tag: String,
- is_re: bool,
+ mode: FieldSearchMode,
},
Duplicates {
notetype_id: NotetypeId,
@@ -158,7 +175,7 @@ pub fn parse(input: &str) -> Result> {
/// Zero or more nodes inside brackets, eg 'one OR two -three'.
/// Empty vec must be handled by caller.
-fn group_inner(input: &str) -> IResult<Vec<Node>> {
+fn group_inner(input: &str) -> IResult<'_, Vec<Node>> {
let mut remaining = input;
let mut nodes = vec![];
@@ -203,16 +220,16 @@ fn group_inner(input: &str) -> IResult> {
Ok((remaining, nodes))
}
-fn whitespace0(s: &str) -> IResult<Vec<char>> {
+fn whitespace0(s: &str) -> IResult<'_, Vec<char>> {
many0(one_of(" \u{3000}")).parse(s)
}
/// Optional leading space, then a (negated) group or text
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node> {
preceded(whitespace0, alt((negated_node, group, text))).parse(s)
}
-fn negated_node(s: &str) -> IResult<Node> {
+fn negated_node(s: &str) -> IResult<'_, Node> {
map(preceded(char('-'), alt((group, text))), |node| {
Node::Not(Box::new(node))
})
@@ -220,7 +237,7 @@ fn negated_node(s: &str) -> IResult {
}
/// One or more nodes surrounded by brackets, eg (one OR two)
-fn group(s: &str) -> IResult<Node> {
+fn group(s: &str) -> IResult<'_, Node> {
let (opened, _) = char('(')(s)?;
let (tail, inner) = group_inner(opened)?;
if let Some(remaining) = tail.strip_prefix(')') {
@@ -235,18 +252,18 @@ fn group(s: &str) -> IResult {
}
/// Either quoted or unquoted text
-fn text(s: &str) -> IResult<Node> {
+fn text(s: &str) -> IResult<'_, Node> {
alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s)
}
/// Quoted text, including the outer double quotes.
-fn quoted_term(s: &str) -> IResult<Node> {
+fn quoted_term(s: &str) -> IResult<'_, Node> {
let (remaining, term) = quoted_term_str(s)?;
Ok((remaining, Node::Search(search_node_for_text(term)?)))
}
/// eg deck:"foo bar" - quotes must come after the :
-fn partially_quoted_term(s: &str) -> IResult<Node> {
+fn partially_quoted_term(s: &str) -> IResult<'_, Node> {
let (remaining, (key, val)) = separated_pair(
escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")),
char(':'),
@@ -260,7 +277,7 @@ fn partially_quoted_term(s: &str) -> IResult {
}
/// Unquoted text, terminated by whitespace or unescaped ", ( or )
-fn unquoted_term(s: &str) -> IResult<Node> {
+fn unquoted_term(s: &str) -> IResult<'_, Node> {
match escaped(is_not("\"() \u{3000}\\"), '\\', none_of(" \u{3000}"))(s) {
Ok((tail, term)) => {
if term.is_empty() {
@@ -297,7 +314,7 @@ fn unquoted_term(s: &str) -> IResult