>,
+ _translations_type: std::marker::PhantomData<P>,
}
-fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
- KEYS_BY_MODULE
- .get(module_idx)
- .and_then(|translations| translations.get(translation_idx))
- .cloned()
- .unwrap_or("invalid-module-or-translation-index")
-}
+impl<P: Translations> I18n<P> {
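+ /// Map a module/translation index pair to its Fluent key, falling back
+ /// to a placeholder key for out-of-range indices.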
+ fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
+ P::KEYS_BY_MODULE
+ .get(module_idx)
+ .and_then(|translations| translations.get(translation_idx))
+ .cloned()
+ .unwrap_or("invalid-module-or-translation-index")
+ }
+
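+ /// For each language, return the concatenated FTL source of the requested
+ /// modules (or of all modules when the list is empty).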
+ fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
+ langs
+ .iter()
+ .cloned()
+ .map(|lang| {
+ let mut buf = String::new();
+ let lang_name = remapped_lang_name(&lang);
+ if let Some(strings) = P::STRINGS.get(lang_name) {
+ if desired_modules.is_empty() {
+ // empty list, provide all modules
+ for value in strings.values() {
+ buf.push_str(value)
+ }
+ } else {
+ for module_name in desired_modules {
+ if let Some(text) = strings.get(module_name.as_str()) {
+ buf.push_str(text);
+ }
+ }
+ }
+ }
+ buf
+ })
+ .collect()
+ }
+
+ /// This temporarily behaves like the older code; in the future we could
+ /// either access each &str separately, or load them on demand.
+ fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
+ let lang = remapped_lang_name(lang);
+ if let Some(module) = P::STRINGS.get(lang) {
+ let mut text = String::new();
+ for module_text in module.values() {
+ text.push_str(module_text)
+ }
+ Some(text)
+ } else {
+ None
+ }
+ }
-impl I18n {
pub fn template_only() -> Self {
Self::new::<&str>(&[])
}
@@ -225,7 +275,7 @@ impl I18n {
let mut output_langs = vec![];
for lang in input_langs {
// if the language is bundled in the binary
- if let Some(text) = ftl_localized_text(&lang).or_else(|| {
+ if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
// when testing, allow missing translations
if cfg!(test) {
Some(String::new())
@@ -244,7 +294,7 @@ impl I18n {
// add English templates
let template_lang = "en-US".parse().unwrap();
- let template_text = ftl_localized_text(&template_lang).unwrap();
+ let template_text = Self::ftl_localized_text(&template_lang).unwrap();
let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
bundles.push(template_bundle);
output_langs.push(template_lang);
@@ -261,6 +311,7 @@ impl I18n {
bundles,
langs: output_langs,
})),
+ _translations_type: PhantomData,
}
}
@@ -270,7 +321,7 @@ impl I18n {
message_index: usize,
args: FluentArgs,
) -> String {
- let key = get_key(module_index, message_index);
+ let key = Self::get_key(module_index, message_index);
self.translate(key, Some(args)).into()
}
@@ -305,7 +356,7 @@ impl I18n {
/// implementation.
pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
let inner = self.inner.lock().unwrap();
- let resources = get_modules(&inner.langs, desired_modules);
+ let resources = Self::get_modules(&inner.langs, desired_modules);
ResourcesForJavascript {
langs: inner.langs.iter().map(ToString::to_string).collect(),
resources,
@@ -313,47 +364,6 @@ impl I18n {
}
}
-fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
- langs
- .iter()
- .cloned()
- .map(|lang| {
- let mut buf = String::new();
- let lang_name = remapped_lang_name(&lang);
- if let Some(strings) = STRINGS.get(lang_name) {
- if desired_modules.is_empty() {
- // empty list, provide all modules
- for value in strings.values() {
- buf.push_str(value)
- }
- } else {
- for module_name in desired_modules {
- if let Some(text) = strings.get(module_name.as_str()) {
- buf.push_str(text);
- }
- }
- }
- }
- buf
- })
- .collect()
-}
-
-/// This temporarily behaves like the older code; in the future we could either
-/// access each &str separately, or load them on demand.
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
- let lang = remapped_lang_name(lang);
- if let Some(module) = STRINGS.get(lang) {
- let mut text = String::new();
- for module_text in module.values() {
- text.push_str(module_text)
- }
- Some(text)
- } else {
- None
- }
-}
-
struct I18nInner {
// bundles in preferred language order, with template English as the
// last element
@@ -490,7 +500,7 @@ mod test {
#[test]
fn i18n() {
// English template
- let tr = I18n::new(&["zz"]);
+ let tr = I18n::::new(&["zz"]);
assert_eq!(tr.translate("valid-key", None), "a valid key");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -513,7 +523,7 @@ mod test {
);
// Another language
- let tr = I18n::new(&["ja_JP"]);
+ let tr = I18n::::new(&["ja_JP"]);
assert_eq!(tr.translate("valid-key", None), "キー");
assert_eq!(tr.translate("only-in-english", None), "not translated");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@@ -524,7 +534,7 @@ mod test {
);
// Decimal separator
- let tr = I18n::new(&["pl-PL"]);
+ let tr = I18n::::new(&["pl-PL"]);
// Polish will use a comma if the string is translated
assert_eq!(
tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),
diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs
index 33905d98f..db31be2b7 100644
--- a/rslib/i18n/write_strings.rs
+++ b/rslib/i18n/write_strings.rs
@@ -15,7 +15,7 @@ use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;
-pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
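+/// Write the generated strings file `out_fn` into OUT_DIR, emitting the
+/// generated translation methods on `I18n<tag>`.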
+pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
let mut buf = String::new();
// lang->module map
@@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
// ordered list of translations by module
write_translation_key_index(modules, &mut buf);
// methods to generate messages
- write_methods(modules, &mut buf);
+ write_methods(modules, &mut buf, tag);
let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
- let path = dir.join("strings.rs");
+ let path = dir.join(out_fn);
fs::write(path, buf).unwrap();
}
-fn write_methods(modules: &[Module], buf: &mut String) {
+fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
buf.push_str(
r#"
-use crate::{I18n,Number};
+#[allow(unused_imports)]
+use crate::{I18n,Number,Translations};
+#[allow(unused_imports)]
use fluent::{FluentValue, FluentArgs};
use std::borrow::Cow;
-impl I18n {
"#,
);
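+ // emit the impl header for this tag's I18n instantiation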
+ writeln!(buf, "impl I18n<{tag}> {{").unwrap();
for module in modules {
for translation in &module.translations {
let func = translation.key.to_snake_case();
@@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
writeln!(
buf,
- "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
+ "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
count = modules.len(),
)
.unwrap();
@@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
buf.push_str(
"
-pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
+pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
);
diff --git a/rslib/proto/python.rs b/rslib/proto/python.rs
index a5adb4179..5c245de1d 100644
--- a/rslib/proto/python.rs
+++ b/rslib/proto/python.rs
@@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()>
write_header(&mut out)?;
for service in services {
- if service.name == "BackendAnkidroidService" {
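+ // services not exposed through the Python interface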
+ if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) {
continue;
}
for method in service.all_methods() {
diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs
index 027c14c0c..70a5d1703 100644
--- a/rslib/src/cloze.rs
+++ b/rslib/src/cloze.rs
@@ -10,6 +10,7 @@ use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
+use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
@@ -26,7 +27,7 @@ use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;
static CLOZE: LazyLock<Regex> =
- LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
+ LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());
static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
@@ -48,7 +49,7 @@ mod mathjax_caps {
#[derive(Debug)]
enum Token<'a> {
// The parameter is the cloze number as it appears in the field content.
- OpenCloze(u16),
+ OpenCloze(Vec<u16>),
Text(&'a str),
CloseCloze,
}
@@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
// opening brackets and 'c'
let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
- // following number
- let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
- let digits: u16 = match digits.parse() {
- Ok(digits) => digits,
- Err(_) => {
- // not a valid number; fail to recognize
- return Err(nom::Err::Error(nom::error::make_error(
- text,
- nom::error::ErrorKind::Digit,
- )));
- }
- };
+ // following comma-separated numbers
+ let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
+ let ordinals: Vec<u16> = ordinals
+ .split(',')
+ .filter_map(|s| s.parse().ok())
+ .collect::<HashSet<_>>() // deduplicate
+ .into_iter()
+ .sorted() // HashSet iteration order is arbitrary, so restore numeric order
+ .collect();
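+ // e.g. "2,1,1" yields the ordinals [1, 2]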
+ if ordinals.is_empty() {
+ return Err(nom::Err::Error(nom::error::make_error(
+ text,
+ nom::error::ErrorKind::Digit,
+ )));
+ }
// ::
let (text, _colons) = tag("::")(text)?;
- Ok((text, Token::OpenCloze(digits)))
+ Ok((text, Token::OpenCloze(ordinals)))
}
fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
@@ -121,11 +125,20 @@ enum TextOrCloze<'a> {
#[derive(Debug)]
struct ExtractedCloze<'a> {
// `ordinal` is the cloze number as it appears in the field content.
- ordinal: u16,
+ ordinals: Vec<u16>,
+ nodes: Vec<TextOrCloze<'a>>,
hint: Option<&'a str>,
}
+/// Generate a string representation of the ordinals for HTML
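+/// `data-ordinal` attributes, e.g. `[1, 2, 3]` becomes `"1,2,3"`.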
+fn ordinals_str(ordinals: &[u16]) -> String {
+ ordinals
+ .iter()
+ .map(|o| o.to_string())
+ .collect::<Vec<_>>()
+ .join(",")
+}
+
impl ExtractedCloze<'_> {
/// Return the cloze's hint, or "..." if none was provided.
fn hint(&self) -> &str {
@@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
buf.into()
}
+ /// Checks if this cloze is active for a given ordinal
+ fn contains_ordinal(&self, ordinal: u16) -> bool {
+ self.ordinals.contains(&ordinal)
+ }
+
/// If cloze starts with image-occlusion:, return the text following that.
fn image_occlusion(&self) -> Option<&str> {
let TextOrCloze::Text(text) = self.nodes.first()? else {
@@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec<TextOrCloze<'_>> {
let mut output = vec![];
for token in tokenize(text) {
match token {
- Token::OpenCloze(ordinal) => {
+ Token::OpenCloze(ordinals) => {
if open_clozes.len() < 10 {
open_clozes.push(ExtractedCloze {
- ordinal,
+ ordinals,
nodes: Vec::with_capacity(1), // common case
hint: None,
})
@@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
output: &mut Vec<String>,
) {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal == cloze_ord {
+ if cloze.contains_ordinal(cloze_ord) {
if question {
output.push(cloze.hint().into())
} else {
@@ -234,14 +252,15 @@ fn reveal_cloze(
active_cloze_found_in_text: &mut bool,
buf: &mut String,
) {
- let active = cloze.ordinal == cloze_ord;
+ let active = cloze.contains_ordinal(cloze_ord);
*active_cloze_found_in_text |= active;
+
if let Some(image_occlusion_text) = cloze.image_occlusion() {
buf.push_str(&render_image_occlusion(
image_occlusion_text,
question,
active,
- cloze.ordinal,
+ &cloze.ordinals,
));
return;
}
@@ -265,7 +284,7 @@ fn reveal_cloze(
buf,
r#"[{}]"#,
encode_attribute(&content_buf),
- cloze.ordinal,
+ ordinals_str(&cloze.ordinals),
cloze.hint()
)
.unwrap();
@@ -274,7 +293,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -292,7 +311,7 @@ fn reveal_cloze(
write!(
buf,
r#""#,
- cloze.ordinal
+ ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@@ -308,23 +327,28 @@ fn reveal_cloze(
}
}
-fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
- if (question_side && active) || ordinal == 0 {
+fn render_image_occlusion(
+ text: &str,
+ question_side: bool,
+ active: bool,
+ ordinals: &[u16],
+) -> String {
+ if (question_side && active) || ordinals.contains(&0) {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !question_side && active {
format!(
r#""#,
- ordinal,
+ ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else {
@@ -338,7 +362,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.image_occlusion().is_some() {
if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
- occlusions.entry(cloze.ordinal).or_default().push(shape);
+ // Associate this occlusion with all ordinals in this cloze
+ for &ordinal in &cloze.ordinals {
+ occlusions.entry(ordinal).or_default().push(shape.clone());
+ }
}
}
}
@@ -420,7 +447,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
pub(crate) fn contains_cloze(text: &str) -> bool {
parse_text_with_clozes(text)
.iter()
- .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
+ .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
}
/// Returns the set of cloze number as they appear in the fields's content.
@@ -433,10 +460,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet<u16> {
fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet<u16>) {
for node in nodes {
if let TextOrCloze::Cloze(cloze) = node {
- if cloze.ordinal != 0 {
- set.insert(cloze.ordinal);
- add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
+ for &ordinal in &cloze.ordinals {
+ if ordinal != 0 {
+ set.insert(ordinal);
+ }
}
+ add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
}
}
}
@@ -654,4 +683,160 @@ mod test {
)
);
}
+
+ #[test]
+ fn multi_card_card_generation() {
+ let text = "{{c1,2,3::multi}}";
+ assert_eq!(
+ cloze_number_in_fields(vec![text]),
+ vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_basic() {
+ let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...] word and [...] vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...] word and first vs [...]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "shared word and first vs second"
+ );
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_html_attributes() {
+ let text = "{{c1,2,3::multi}}";
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2,3""#));
+
+ let card3_html = reveal_cloze_text(text, 3, true);
+ assert!(card3_html.contains(r#"data-ordinal="1,2,3""#));
+ }
+
+ #[test]
+ fn multi_card_cloze_with_hints() {
+ let text = "{{c1,2::answer::hint}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[hint]"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[hint]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+ "answer"
+ );
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+ "answer"
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_edge_cases() {
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,1,2::test}}"),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c0,1,2::test}}"),
+ vec![1, 2].into_iter().collect::<HashSet<u16>>()
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string("{{c1,,3::test}}"),
+ vec![1, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_cloze_only_filter() {
+ let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
+
+ assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ...");
+ assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first");
+ assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second");
+ }
+
+ #[test]
+ fn multi_card_nested_cloze() {
+ let text = "{{c1,2::outer {{c3::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 3, true)).as_ref(),
+ "outer [...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn nested_parent_child_card_same_cloze() {
+ let text = "{{c1::outer {{c1::inner}}}}";
+
+ assert_eq!(
+ strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+ "[...]"
+ );
+
+ assert_eq!(
+ cloze_numbers_in_string(text),
+ vec![1].into_iter().collect::<HashSet<u16>>()
+ );
+ }
+
+ #[test]
+ fn multi_card_image_occlusion() {
+ let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
+
+ let occlusions = parse_image_occlusions(text);
+ assert_eq!(occlusions.len(), 2);
+ assert!(occlusions.iter().any(|o| o.ordinal == 1));
+ assert!(occlusions.iter().any(|o| o.ordinal == 2));
+
+ let card1_html = reveal_cloze_text(text, 1, true);
+ assert!(card1_html.contains(r#"data-ordinal="1,2""#));
+
+ let card2_html = reveal_cloze_text(text, 2, true);
+ assert!(card2_html.contains(r#"data-ordinal="1,2""#));
+ }
}
diff --git a/rslib/src/collection/service.rs b/rslib/src/collection/service.rs
index 2050a6897..a37360782 100644
--- a/rslib/src/collection/service.rs
+++ b/rslib/src/collection/service.rs
@@ -1,8 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use anki_proto::collection::GetCustomColoursResponse;
use anki_proto::generic;
use crate::collection::Collection;
+use crate::config::ConfigKey;
use crate::error;
use crate::prelude::BoolKey;
use crate::prelude::Op;
@@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
})
.map(Into::into)
}
+
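+ /// Returns the colours saved in the custom colour picker palette,
+ /// or an empty list if none have been stored.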
+ fn get_custom_colours(
+ &mut self,
+ ) -> error::Result<GetCustomColoursResponse> {
+ let colours = self
+ .get_config_optional(ConfigKey::CustomColorPickerPalette)
+ .unwrap_or_default();
+ Ok(GetCustomColoursResponse { colours })
+ }
}
diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs
index 5ece5b7e1..1e507281a 100644
--- a/rslib/src/config/mod.rs
+++ b/rslib/src/config/mod.rs
@@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
NextNewCardPosition,
#[strum(to_string = "schedVer")]
SchedulerVersion,
+ CustomColorPickerPalette,
}
#[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]
diff --git a/rslib/src/import_export/package/apkg/import/media.rs b/rslib/src/import_export/package/apkg/import/media.rs
index 32bf7c807..20543e074 100644
--- a/rslib/src/import_export/package/apkg/import/media.rs
+++ b/rslib/src/import_export/package/apkg/import/media.rs
@@ -17,6 +17,7 @@ use crate::import_export::package::media::SafeMediaEntry;
use crate::import_export::ImportProgress;
use crate::media::files::add_hash_suffix_to_file_stem;
use crate::media::files::sha1_of_reader;
+use crate::media::Checksums;
use crate::prelude::*;
use crate::progress::ThrottlingProgressHandler;
@@ -75,7 +76,7 @@ impl Context<'_> {
fn prepare_media(
media_entries: Vec<SafeMediaEntry>,
archive: &mut ZipArchive<File>,
- existing_sha1s: &HashMap<String, Sha1Hash>,
+ existing_sha1s: &Checksums,
progress: &mut ThrottlingProgressHandler<ImportProgress>,
) -> Result<MediaUseMap> {
let mut media_map = MediaUseMap::default();
diff --git a/rslib/src/import_export/text/csv/import.rs b/rslib/src/import_export/text/csv/import.rs
index e45bbca1b..31dee84e4 100644
--- a/rslib/src/import_export/text/csv/import.rs
+++ b/rslib/src/import_export/text/csv/import.rs
@@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use std::collections::HashSet;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Read;
@@ -106,6 +107,8 @@ struct ColumnContext {
notetype_column: Option<usize>,
/// Source column indices for the fields of a notetype
field_source_columns: FieldSourceColumns,
+ /// Metadata column indices (1-based)
+ meta_columns: HashSet<usize>,
/// How fields are converted to strings. Used for escaping HTML if
/// appropriate.
stringify: fn(&str) -> String,
@@ -119,6 +122,7 @@ impl ColumnContext {
deck_column: metadata.deck()?.column(),
notetype_column: metadata.notetype()?.column(),
field_source_columns: metadata.field_source_columns()?,
+ meta_columns: metadata.meta_columns(),
stringify: stringify_fn(metadata.is_html),
})
}
@@ -166,11 +170,19 @@ impl ColumnContext {
}
fn gather_note_fields(&self, record: &csv::StringRecord) -> Vec<String>