diff --git a/.gitignore b/.gitignore index 68139a32a..4c43afd06 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ node_modules .ninja_log .ninja_deps /extra +yarn-error.log diff --git a/build/configure/src/web.rs b/build/configure/src/web.rs index 80eaf1e5a..a37f7c4d3 100644 --- a/build/configure/src/web.rs +++ b/build/configure/src/web.rs @@ -314,6 +314,17 @@ fn build_and_check_pages(build: &mut Build) -> Result<()> { ":sass" ], )?; + build_page( + "import-anki-package", + true, + inputs![ + // + ":ts:lib", + ":ts:components", + ":ts:sveltelib", + ":sass" + ], + )?; // we use the generated .css file separately build_page( "editable", diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl index 9284e6696..a761abd6e 100644 --- a/ftl/core/importing.ftl +++ b/ftl/core/importing.ftl @@ -30,6 +30,18 @@ importing-map-to = Map to { $val } importing-map-to-tags = Map to Tags importing-mapped-to = mapped to { $val } importing-mapped-to-tags = mapped to Tags +# the action of combining two existing notetypes to create a new one +importing-merge-notetypes = Merge notetypes +importing-merge-notetypes-help = + If checked, and you or the deck author altered the schema of a notetype, Anki will + merge the two versions instead of keeping both. + + Altering a notetype's schema means adding, removing, or reordering fields or templates, + or changing the sort field. + As a counterexample, changing the front side of an existing template does *not* constitute + a schema change. + + Warning: This will require a one-way sync, and may mark existing notes as modified. importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db) importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only. importing-notes-added-from-file = Notes added from file: { $val } @@ -37,6 +49,10 @@ importing-notes-found-in-file = Notes found in file: { $val } importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val } importing-notes-skipped-update-due-to-notetype = Notes not updated, as notetype has been modified since you first imported the notes: { $val } importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val } +importing-include-reviews = Include reviews +importing-include-reviews-help = + If enabled, any previous reviews that the deck sharer included will also be imported. + Otherwise, all cards will be imported as new cards. importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip) importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) # the '|' character @@ -57,6 +73,19 @@ importing-unable-to-import-from-a-readonly = Unable to import from a read-only f importing-unknown-file-format = Unknown file format. importing-update-existing-notes-when-first-field = Update existing notes when first field matches importing-updated = Updated +importing-update-if-newer = If newer +importing-update-always = Always +importing-update-never = Never +importing-update-notes = Update notes +importing-update-notes-help = + When to update an existing note in your collection. By default, this is only done + if the matching imported note was more recently modified. +importing-update-notetypes = Update notetypes +importing-update-notetypes-help = + When to update an existing notetype in your collection. By default, this is only done + if the matching imported notetype was more recently modified. 
Changes to template text + and styling can always be imported, but for schema changes (e.g. the number or order of + fields has changed), the '{ importing-merge-notetypes }' option will also need to be enabled. importing-note-added = { $count -> [one] { $count } note added @@ -136,6 +165,11 @@ importing-conflicting-notes-skipped = [one] { $count } note was not imported, because its note type has changed. *[other] { $count } were not imported, because their note type has changed. } +importing-conflicting-notes-skipped2 = + { $count -> + [one] { $count } note was not imported, because its notetype has changed, and '{ importing-merge-notetypes }' was not enabled. + *[other] { $count } were not imported, because their notetype has changed, and '{ importing-merge-notetypes }' was not enabled. + } importing-import-log = Import Log importing-no-notes-in-file = No notes found in file. importing-notes-found-in-file2 = diff --git a/proto/anki/import_export.proto b/proto/anki/import_export.proto index ba0d60a5b..0ecfc7b3b 100644 --- a/proto/anki/import_export.proto +++ b/proto/anki/import_export.proto @@ -14,6 +14,8 @@ import "anki/generic.proto"; service ImportExportService { rpc ImportAnkiPackage(ImportAnkiPackageRequest) returns (ImportResponse); + rpc GetImportAnkiPackagePresets(generic.Empty) + returns (ImportAnkiPackageOptions); rpc ExportAnkiPackage(ExportAnkiPackageRequest) returns (generic.UInt32); rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata); rpc ImportCsv(ImportCsvRequest) returns (ImportResponse); @@ -45,8 +47,22 @@ message ExportCollectionPackageRequest { bool legacy = 3; } +enum ImportAnkiPackageUpdateCondition { + IMPORT_ANKI_PACKAGE_UPDATE_CONDITION_IF_NEWER = 0; + IMPORT_ANKI_PACKAGE_UPDATE_CONDITION_ALWAYS = 1; + IMPORT_ANKI_PACKAGE_UPDATE_CONDITION_NEVER = 2; +} + +message ImportAnkiPackageOptions { + bool merge_notetypes = 1; + ImportAnkiPackageUpdateCondition update_notes = 2; + ImportAnkiPackageUpdateCondition update_notetypes = 3; + bool with_scheduling = 4; +} + message ImportAnkiPackageRequest { string package_path = 1; + ImportAnkiPackageOptions options = 2; } message ImportResponse { diff --git a/proto/anki/notetypes.proto b/proto/anki/notetypes.proto index 87d7a2eb8..a40c75b07 100644 --- a/proto/anki/notetypes.proto +++ b/proto/anki/notetypes.proto @@ -70,6 +70,8 @@ message Notetype { repeated CardRequirement reqs = 8; // Only set on notetypes created with Anki 2.1.62+. 
StockNotetype.OriginalStockKind original_stock_kind = 9; + // the id in the source collection for imported notetypes (Anki 23.09) + optional int64 original_id = 10; bytes other = 255; } @@ -83,6 +85,8 @@ message Notetype { bool plain_text = 6; bool collapsed = 7; bool exclude_from_search = 8; + // used for merging notetypes on import (Anki 23.09) + optional int64 id = 9; bytes other = 255; } @@ -99,6 +103,8 @@ message Notetype { int64 target_deck_id = 5; string browser_font_name = 6; uint32 browser_font_size = 7; + // used for merging notetypes on import (Anki 23.09) + optional int64 id = 8; bytes other = 255; } diff --git a/pylib/anki/collection.py b/pylib/anki/collection.py index 9f2df734f..8073334c6 100644 --- a/pylib/anki/collection.py +++ b/pylib/anki/collection.py @@ -38,6 +38,7 @@ BrowserRow = search_pb2.BrowserRow BrowserColumns = search_pb2.BrowserColumns StripHtmlMode = card_rendering_pb2.StripHtmlRequest ImportLogWithChanges = import_export_pb2.ImportResponse +ImportAnkiPackageRequest = import_export_pb2.ImportAnkiPackageRequest ImportCsvRequest = import_export_pb2.ImportCsvRequest CsvMetadata = import_export_pb2.CsvMetadata DupeResolution = CsvMetadata.DupeResolution @@ -395,8 +396,11 @@ class Collection(DeprecatedNamesMixin): out_path=out_path, include_media=include_media, legacy=legacy ) - def import_anki_package(self, path: str) -> ImportLogWithChanges: - return self._backend.import_anki_package(package_path=path) + def import_anki_package( + self, request: ImportAnkiPackageRequest + ) -> ImportLogWithChanges: + log = self._backend.import_anki_package_raw(request.SerializeToString()) + return ImportLogWithChanges.FromString(log) def export_anki_package( self, diff --git a/qt/aqt/import_export/import_csv_dialog.py b/qt/aqt/import_export/import_dialog.py similarity index 64% rename from qt/aqt/import_export/import_csv_dialog.py rename to qt/aqt/import_export/import_dialog.py index 0726d1009..2318b47d3 100644 --- a/qt/aqt/import_export/import_csv_dialog.py +++ b/qt/aqt/import_export/import_dialog.py @@ -12,8 +12,13 @@ from aqt.utils import addCloseShortcut, disable_help_button, restoreGeom, saveGe from aqt.webview import AnkiWebView, AnkiWebViewKind -class ImportCsvDialog(QDialog): - TITLE = "csv import" +class ImportDialog(QDialog): + TITLE: str + KIND: AnkiWebViewKind + TS_PAGE: str + SETUP_FUNCTION_NAME: str + DEFAULT_SIZE = (800, 800) + MIN_SIZE = (400, 300) silentlyClose = True def __init__( @@ -29,13 +34,14 @@ class ImportCsvDialog(QDialog): def _setup_ui(self, path: str) -> None: self.setWindowModality(Qt.WindowModality.ApplicationModal) self.mw.garbage_collect_on_dialog_finish(self) - self.setMinimumSize(400, 300) + self.setMinimumSize(*self.MIN_SIZE) disable_help_button(self) + restoreGeom(self, self.TITLE, default_size=self.DEFAULT_SIZE) addCloseShortcut(self) - self.web = AnkiWebView(kind=AnkiWebViewKind.IMPORT_CSV) + self.web = AnkiWebView(kind=self.KIND) self.web.setVisible(False) - self.web.load_ts_page("import-csv") + self.web.load_ts_page(self.TS_PAGE) layout = QVBoxLayout() layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(self.web) @@ -44,7 +50,8 @@ class ImportCsvDialog(QDialog): escaped_path = path.replace("'", r"\'") self.web.evalWithCallback( - f"anki.setupImportCsvPage('{escaped_path}');", lambda _: self.web.setFocus() + f"anki.{self.SETUP_FUNCTION_NAME}('{escaped_path}');", + lambda _: self.web.setFocus(), ) self.setWindowTitle(tr.decks_import_file()) @@ -55,3 +62,17 @@ class ImportCsvDialog(QDialog): self.web = None saveGeom(self, self.TITLE) 
QDialog.reject(self) + + +class ImportCsvDialog(ImportDialog): + TITLE = "csv import" + KIND = AnkiWebViewKind.IMPORT_CSV + TS_PAGE = "import-csv" + SETUP_FUNCTION_NAME = "setupImportCsvPage" + + +class ImportAnkiPackageDialog(ImportDialog): + TITLE = "anki package import" + KIND = AnkiWebViewKind.IMPORT_ANKI_PACKAGE + TS_PAGE = "import-anki-package" + SETUP_FUNCTION_NAME = "setupImportAnkiPackagePage" diff --git a/qt/aqt/import_export/import_log_dialog.py b/qt/aqt/import_export/import_log_dialog.py index 55c12df1c..f93f1de3b 100644 --- a/qt/aqt/import_export/import_log_dialog.py +++ b/qt/aqt/import_export/import_log_dialog.py @@ -25,11 +25,6 @@ class _CommonArgs: return json.dumps(dataclasses.asdict(self)) -@dataclass -class ApkgArgs(_CommonArgs): - type = "apkg" - - @dataclass class JsonFileArgs(_CommonArgs): type = "json_file" @@ -48,7 +43,7 @@ class ImportLogDialog(QDialog): def __init__( self, mw: aqt.main.AnkiQt, - args: ApkgArgs | JsonFileArgs | JsonStringArgs, + args: JsonFileArgs | JsonStringArgs, ) -> None: QDialog.__init__(self, mw, Qt.WindowType.Window) self.mw = mw @@ -57,7 +52,7 @@ class ImportLogDialog(QDialog): def _setup_ui( self, - args: ApkgArgs | JsonFileArgs | JsonStringArgs, + args: JsonFileArgs | JsonStringArgs, ) -> None: self.setWindowModality(Qt.WindowModality.ApplicationModal) self.mw.garbage_collect_on_dialog_finish(self) diff --git a/qt/aqt/import_export/importing.py b/qt/aqt/import_export/importing.py index 0d228d7cb..a155630dc 100644 --- a/qt/aqt/import_export/importing.py +++ b/qt/aqt/import_export/importing.py @@ -13,9 +13,8 @@ from anki.collection import Collection, Progress from anki.errors import Interrupted from anki.foreign_data import mnemosyne from anki.lang import without_unicode_isolation -from aqt.import_export.import_csv_dialog import ImportCsvDialog +from aqt.import_export.import_dialog import ImportAnkiPackageDialog, ImportCsvDialog from aqt.import_export.import_log_dialog import ( - ApkgArgs, ImportLogDialog, JsonFileArgs, JsonStringArgs, @@ -88,7 +87,7 @@ class ApkgImporter(Importer): @staticmethod def do_import(mw: aqt.main.AnkiQt, path: str) -> None: - ImportLogDialog(mw, ApkgArgs(path=path)) + ImportAnkiPackageDialog(mw, path) class MnemosyneImporter(Importer): diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index b169023ec..33726de77 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -424,12 +424,10 @@ def set_scheduling_states() -> bytes: def import_done() -> bytes: def update_window_modality() -> None: if window := aqt.mw.app.activeWindow(): - from aqt.import_export.import_csv_dialog import ImportCsvDialog + from aqt.import_export.import_dialog import ImportDialog from aqt.import_export.import_log_dialog import ImportLogDialog - if isinstance(window, ImportCsvDialog) or isinstance( - window, ImportLogDialog - ): + if isinstance(window, (ImportDialog, ImportLogDialog)): window.hide() window.setWindowModality(Qt.WindowModality.NonModal) window.show() @@ -517,6 +515,7 @@ exposed_backend_list = [ "i18n_resources", # ImportExportService "get_csv_metadata", + "get_import_anki_package_presets", # NotesService "get_field_names", "get_note", diff --git a/qt/aqt/webview.py b/qt/aqt/webview.py index 19d834a77..9f100ac3f 100644 --- a/qt/aqt/webview.py +++ b/qt/aqt/webview.py @@ -249,6 +249,7 @@ class AnkiWebViewKind(Enum): FIND_DUPLICATES = "find duplicates" FIELDS = "fields" IMPORT_LOG = "import log" + IMPORT_ANKI_PACKAGE = "anki package import" class AnkiWebView(QWebEngineView): diff --git a/rslib/proto/rust.rs b/rslib/proto/rust.rs 
index c3af4fe77..93012874a 100644 --- a/rslib/proto/rust.rs +++ b/rslib/proto/rust.rs @@ -45,6 +45,10 @@ pub fn write_rust_protos(descriptors_path: PathBuf) -> Result { "CsvMetadata.MatchScope", "#[derive(serde::Deserialize, serde::Serialize)]", ) + .type_attribute( + "ImportAnkiPackageUpdateCondition", + "#[derive(serde::Deserialize, serde::Serialize)]", + ) .compile_protos(paths.as_slice(), &[proto_dir]) .context("prost build")?; diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index 82bf75fdc..e90235745 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -34,6 +34,8 @@ pub enum BoolKey { RandomOrderReposition, Sched2021, ShiftPositionOfExistingCards, + MergeNotetypes, + WithScheduling, #[strum(to_string = "normalize_note_text")] NormalizeNoteText, diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs index 2133b2e30..4ed13cd17 100644 --- a/rslib/src/config/mod.rs +++ b/rslib/src/config/mod.rs @@ -21,6 +21,7 @@ pub use self::deck::DeckConfigKey; pub use self::notetype::get_aux_notetype_config_key; pub use self::number::I32ConfigKey; pub use self::string::StringKey; +use crate::import_export::package::UpdateCondition; use crate::prelude::*; /// Only used when updating/undoing. @@ -51,6 +52,8 @@ pub(crate) enum ConfigKey { LocalOffset, Rollover, Backups, + UpdateNotes, + UpdateNotetypes, #[strum(to_string = "timeLim")] AnswerTimeLimitSecs, @@ -286,6 +289,16 @@ impl Collection { pub(crate) fn set_backup_limits(&mut self, limits: BackupLimits) -> Result<()> { self.set_config(ConfigKey::Backups, &limits).map(|_| ()) } + + pub(crate) fn get_update_notes(&self) -> UpdateCondition { + self.get_config_optional(ConfigKey::UpdateNotes) + .unwrap_or_default() + } + + pub(crate) fn get_update_notetypes(&self) -> UpdateCondition { + self.get_config_optional(ConfigKey::UpdateNotetypes) + .unwrap_or_default() + } } // 2021 scheduler moves this into deck config diff --git a/rslib/src/image_occlusion/notetype.rs b/rslib/src/image_occlusion/notetype.rs index 497793aa1..2ed763088 100644 --- a/rslib/src/image_occlusion/notetype.rs +++ b/rslib/src/image_occlusion/notetype.rs @@ -43,7 +43,7 @@ impl Collection { } pub(crate) fn get_first_io_notetype(&mut self) -> Result>> { - for (_, nt) in self.get_all_notetypes()? { + for nt in self.get_all_notetypes()? 
{ if nt.config.original_stock_kind() == OriginalStockKind::ImageOcclusion { return Some(io_notetype_if_valid(nt)).transpose(); } diff --git a/rslib/src/import_export/package/apkg/import/cards.rs b/rslib/src/import_export/package/apkg/import/cards.rs index 338213f0b..bba483f31 100644 --- a/rslib/src/import_export/package/apkg/import/cards.rs +++ b/rslib/src/import_export/package/apkg/import/cards.rs @@ -6,6 +6,7 @@ use std::collections::HashSet; use std::mem; use super::Context; +use super::TemplateMap; use crate::card::CardQueue; use crate::card::CardType; use crate::config::SchedulerVersion; @@ -19,6 +20,8 @@ struct CardContext<'a> { usn: Usn, imported_notes: &'a HashMap, + notetype_map: &'a HashMap, + remapped_templates: &'a HashMap, remapped_decks: &'a HashMap, /// The number of days the source collection is ahead of the target @@ -37,6 +40,8 @@ impl<'c> CardContext<'c> { days_elapsed: u32, target_col: &'a mut Collection, imported_notes: &'a HashMap, + notetype_map: &'a HashMap, + remapped_templates: &'a HashMap, imported_decks: &'a HashMap, ) -> Result { let existing_cards = target_col.storage.all_cards_as_nid_and_ord()?; @@ -47,6 +52,8 @@ impl<'c> CardContext<'c> { target_col, usn, imported_notes, + notetype_map, + remapped_templates, remapped_decks: imported_decks, existing_cards, collection_delta, @@ -68,6 +75,8 @@ impl Context<'_> { pub(super) fn import_cards_and_revlog( &mut self, imported_notes: &HashMap, + notetype_map: &HashMap, + remapped_templates: &HashMap, imported_decks: &HashMap, keep_filtered: bool, ) -> Result<()> { @@ -76,6 +85,8 @@ impl Context<'_> { self.data.days_elapsed, self.target_col, imported_notes, + notetype_map, + remapped_templates, imported_decks, )?; ctx.import_cards(mem::take(&mut self.data.cards), keep_filtered)?; @@ -122,6 +133,7 @@ impl CardContext<'_> { fn add_card(&mut self, card: &mut Card, keep_filtered: bool) -> Result<()> { card.usn = self.usn; self.remap_deck_ids(card); + self.remap_template_index(card); card.shift_collection_relative_dates(self.collection_delta); if !keep_filtered { card.maybe_remove_from_filtered_deck(self.scheduler_version); @@ -151,6 +163,16 @@ impl CardContext<'_> { card.original_deck_id = *did; } } + + fn remap_template_index(&self, card: &mut Card) { + card.template_idx = self + .notetype_map + .get(&card.note_id) + .and_then(|ntid| self.remapped_templates.get(ntid)) + .and_then(|map| map.get(&card.template_idx)) + .copied() + .unwrap_or(card.template_idx); + } } impl Card { diff --git a/rslib/src/import_export/package/apkg/import/mod.rs b/rslib/src/import_export/package/apkg/import/mod.rs index fcce79db3..f1093d9c4 100644 --- a/rslib/src/import_export/package/apkg/import/mod.rs +++ b/rslib/src/import_export/package/apkg/import/mod.rs @@ -21,8 +21,11 @@ use zip::ZipArchive; use super::super::meta::MetaExt; use crate::collection::CollectionBuilder; +use crate::config::ConfigKey; use crate::import_export::gather::ExchangeData; +use crate::import_export::package::ImportAnkiPackageOptions; use crate::import_export::package::Meta; +use crate::import_export::package::UpdateCondition; use crate::import_export::ImportProgress; use crate::import_export::NoteLog; use crate::media::MediaManager; @@ -30,8 +33,14 @@ use crate::prelude::*; use crate::progress::ThrottlingProgressHandler; use crate::search::SearchNode; +/// A map of old to new template indices for a given notetype. 
+type TemplateMap = std::collections::HashMap; + struct Context<'a> { target_col: &'a mut Collection, + merge_notetypes: bool, + update_notes: UpdateCondition, + update_notetypes: UpdateCondition, media_manager: MediaManager, archive: ZipArchive, meta: Meta, @@ -41,13 +50,21 @@ struct Context<'a> { } impl Collection { - pub fn import_apkg(&mut self, path: impl AsRef) -> Result> { + pub fn import_apkg( + &mut self, + path: impl AsRef, + options: ImportAnkiPackageOptions, + ) -> Result> { let file = open_file(path)?; let archive = ZipArchive::new(file)?; let progress = self.new_progress_handler(); self.transact(Op::Import, |col| { - let mut ctx = Context::new(archive, col, progress)?; + col.set_config(BoolKey::MergeNotetypes, &options.merge_notetypes)?; + col.set_config(BoolKey::WithScheduling, &options.with_scheduling)?; + col.set_config(ConfigKey::UpdateNotes, &options.update_notes())?; + col.set_config(ConfigKey::UpdateNotetypes, &options.update_notetypes())?; + let mut ctx = Context::new(archive, col, options, progress)?; ctx.import() }) } @@ -57,6 +74,7 @@ impl<'a> Context<'a> { fn new( mut archive: ZipArchive, target_col: &'a mut Collection, + options: ImportAnkiPackageOptions, mut progress: ThrottlingProgressHandler, ) -> Result { let media_manager = target_col.media()?; @@ -66,11 +84,14 @@ impl<'a> Context<'a> { &meta, SearchNode::WholeCollection, &mut progress, - true, + options.with_scheduling, )?; let usn = target_col.usn()?; Ok(Self { target_col, + merge_notetypes: options.merge_notetypes, + update_notes: options.update_notes(), + update_notetypes: options.update_notetypes(), media_manager, archive, meta, @@ -81,12 +102,24 @@ impl<'a> Context<'a> { } fn import(&mut self) -> Result { + let notetypes = self + .data + .notes + .iter() + .map(|n| (n.id, n.notetype_id)) + .collect(); let mut media_map = self.prepare_media()?; let note_imports = self.import_notes_and_notetypes(&mut media_map)?; let keep_filtered = self.data.enables_filtered_decks(); let contains_scheduling = self.data.contains_scheduling(); let imported_decks = self.import_decks_and_configs(keep_filtered, contains_scheduling)?; - self.import_cards_and_revlog(¬e_imports.id_map, &imported_decks, keep_filtered)?; + self.import_cards_and_revlog( + ¬e_imports.id_map, + ¬etypes, + ¬e_imports.remapped_templates, + &imported_decks, + keep_filtered, + )?; self.copy_media(&mut media_map)?; Ok(note_imports.log) } diff --git a/rslib/src/import_export/package/apkg/import/notes.rs b/rslib/src/import_export/package/apkg/import/notes.rs index d544a0089..bb2b3c42c 100644 --- a/rslib/src/import_export/package/apkg/import/notes.rs +++ b/rslib/src/import_export/package/apkg/import/notes.rs @@ -7,14 +7,14 @@ use std::collections::HashSet; use std::mem; use std::sync::Arc; -use sha1::Digest; -use sha1::Sha1; - use super::media::MediaUseMap; use super::Context; +use super::TemplateMap; use crate::import_export::package::media::safe_normalized_file_name; +use crate::import_export::package::UpdateCondition; use crate::import_export::ImportProgress; use crate::import_export::NoteLog; +use crate::notetype::ChangeNotetypeInput; use crate::prelude::*; use crate::progress::ThrottlingProgressHandler; use crate::text::replace_media_refs; @@ -24,15 +24,21 @@ struct NoteContext<'a> { usn: Usn, normalize_notes: bool, remapped_notetypes: HashMap, + remapped_fields: HashMap>>, target_guids: HashMap, target_ids: HashSet, + target_notetypes: Vec>, media_map: &'a mut MediaUseMap, + merge_notetypes: bool, + update_notes: UpdateCondition, + update_notetypes: 
UpdateCondition, imports: NoteImports, } #[derive(Debug, Default)] pub(super) struct NoteImports { pub(super) id_map: HashMap, + pub(super) remapped_templates: HashMap, /// All notes from the source collection as [Vec]s of their fields, and /// grouped by import result kind. pub(super) log: NoteLog, @@ -83,7 +89,14 @@ impl Context<'_> { &mut self, media_map: &mut MediaUseMap, ) -> Result { - let mut ctx = NoteContext::new(self.usn, self.target_col, media_map)?; + let mut ctx = NoteContext::new( + self.usn, + self.target_col, + media_map, + self.merge_notetypes, + self.update_notes, + self.update_notetypes, + )?; ctx.import_notetypes(mem::take(&mut self.data.notetypes))?; ctx.import_notes(mem::take(&mut self.data.notes), &mut self.progress)?; Ok(ctx.imports) @@ -95,26 +108,41 @@ impl<'n> NoteContext<'n> { usn: Usn, target_col: &'a mut Collection, media_map: &'a mut MediaUseMap, + merge_notetypes: bool, + update_notes: UpdateCondition, + update_notetypes: UpdateCondition, ) -> Result { let target_guids = target_col.storage.note_guid_map()?; let normalize_notes = target_col.get_config_bool(BoolKey::NormalizeNoteText); let target_ids = target_col.storage.get_all_note_ids()?; + let target_notetypes = target_col.get_all_notetypes()?; Ok(Self { target_col, usn, normalize_notes, remapped_notetypes: HashMap::new(), + remapped_fields: HashMap::new(), target_guids, target_ids, + target_notetypes, imports: NoteImports::default(), + merge_notetypes, + update_notes, + update_notetypes, media_map, }) } fn import_notetypes(&mut self, mut notetypes: Vec) -> Result<()> { for notetype in &mut notetypes { - if let Some(existing) = self.target_col.storage.get_notetype(notetype.id)? { - self.merge_or_remap_notetype(notetype, existing)?; + notetype.config.original_id.replace(notetype.id.0); + if let Some(nt) = self.get_target_notetype(notetype.id) { + let existing = nt.as_ref().clone(); + if self.merge_notetypes { + self.update_or_merge_notetype(notetype, existing)?; + } else { + self.update_or_duplicate_notetype(notetype, existing)?; + } } else { self.add_notetype(notetype)?; } @@ -122,21 +150,54 @@ impl<'n> NoteContext<'n> { Ok(()) } - fn merge_or_remap_notetype( + fn get_target_notetype(&self, ntid: NotetypeId) -> Option<&Arc> { + self.target_notetypes.iter().find(|nt| nt.id == ntid) + } + + fn update_or_duplicate_notetype( &mut self, incoming: &mut Notetype, - existing: Notetype, + mut existing: Notetype, ) -> Result<()> { - if incoming.schema_hash() == existing.schema_hash() { - if incoming.mtime_secs > existing.mtime_secs { - self.update_notetype(incoming, existing)?; + if !existing.equal_schema(incoming) { + if let Some(nt) = self.get_previously_duplicated_notetype(incoming) { + existing = nt; + self.remapped_notetypes.insert(incoming.id, existing.id); + incoming.id = existing.id; + } else { + return self.add_notetype_with_remapped_id(incoming); } - } else { - self.add_notetype_with_remapped_id(incoming)?; + } + if should_update( + self.update_notetypes, + existing.mtime_secs, + incoming.mtime_secs, + ) { + self.update_notetype(incoming, existing, false)?; } Ok(()) } + /// Try to find a notetype with matching original id and schema. 
+ fn get_previously_duplicated_notetype(&self, original: &Notetype) -> Option { + self.target_notetypes + .iter() + .find(|nt| { + nt.id != original.id + && nt.config.original_id == Some(original.id.0) + && nt.equal_schema(original) + }) + .map(|nt| nt.as_ref().clone()) + } + + fn should_update_notetype(&self, existing: &Notetype, incoming: &Notetype) -> bool { + match self.update_notetypes { + UpdateCondition::IfNewer => existing.mtime_secs < incoming.mtime_secs, + UpdateCondition::Always => true, + UpdateCondition::Never => false, + } + } + fn add_notetype(&mut self, notetype: &mut Notetype) -> Result<()> { notetype.prepare_for_update(None, true)?; self.target_col @@ -146,12 +207,101 @@ impl<'n> NoteContext<'n> { .add_notetype_with_unique_id_undoable(notetype) } - fn update_notetype(&mut self, notetype: &mut Notetype, original: Notetype) -> Result<()> { - notetype.usn = self.usn; + fn update_notetype( + &mut self, + notetype: &mut Notetype, + original: Notetype, + modified: bool, + ) -> Result<()> { + if modified { + notetype.set_modified(self.usn); + notetype.prepare_for_update(Some(&original), true)?; + } else { + notetype.usn = self.usn; + } self.target_col .add_or_update_notetype_with_existing_id_inner(notetype, Some(original), self.usn, true) } + fn update_or_merge_notetype( + &mut self, + incoming: &mut Notetype, + mut existing: Notetype, + ) -> Result<()> { + let original_existing = existing.clone(); + // get and merge duplicated notetypes from previous no-merge imports + let mut siblings = self.get_sibling_notetypes(existing.id); + existing.merge_all(&siblings); + incoming.merge(&existing); + existing.merge(incoming); + self.record_remapped_ords(incoming); + let new_incoming = if self.should_update_notetype(&existing, incoming) { + // ords must be existing's as they are used to remap note fields and card + // template indices + incoming.copy_ords(&existing); + incoming + } else { + &mut existing + }; + self.update_notetype(new_incoming, original_existing, true)?; + self.drop_sibling_notetypes(new_incoming, &mut siblings) + } + + /// Get notetypes with different id, but matching original id. + fn get_sibling_notetypes(&mut self, original_id: NotetypeId) -> Vec { + self.target_notetypes + .iter() + .filter(|nt| nt.id != original_id && nt.config.original_id == Some(original_id.0)) + .map(|nt| nt.as_ref().clone()) + .collect() + } + + /// Removes the sibling notetypes, changing their notes' notetype to + /// `original`. This assumes `siblings` have already been merged into + /// `original`. 
+ fn drop_sibling_notetypes( + &mut self, + original: &Notetype, + siblings: &mut [Notetype], + ) -> Result<()> { + for nt in siblings { + nt.merge(original); + let note_ids = self.target_col.search_notes_unordered(nt.id)?; + self.target_col + .change_notetype_of_notes_inner(ChangeNotetypeInput { + current_schema: self.target_col_schema_change()?, + note_ids, + old_notetype_name: nt.name.clone(), + old_notetype_id: nt.id, + new_notetype_id: original.id, + new_fields: nt.field_ords_vec(), + new_templates: Some(nt.template_ords_vec()), + })?; + self.target_col.remove_notetype_inner(nt.id)?; + } + Ok(()) + } + + fn target_col_schema_change(&self) -> Result { + self.target_col + .storage + .get_collection_timestamps() + .map(|ts| ts.schema_change) + } + + fn record_remapped_ords(&mut self, incoming: &Notetype) { + self.remapped_fields + .insert(incoming.id, incoming.field_ords().collect()); + self.imports.remapped_templates.insert( + incoming.id, + incoming + .template_ords() + .enumerate() + .filter_map(|(new, old)| old.map(|ord| (ord as u16, new as u16))) + .collect(), + ); + } + fn add_notetype_with_remapped_id(&mut self, notetype: &mut Notetype) -> Result<()> { let old_id = mem::take(&mut notetype.id); notetype.usn = self.usn; @@ -170,26 +320,10 @@ impl<'n> NoteContext<'n> { self.imports.log.found_notes = notes.len() as u32; for mut note in notes { incrementor.increment()?; - let remapped_notetype_id = self.remapped_notetypes.get(¬e.notetype_id); + self.remap_notetype_and_fields(&mut note); if let Some(existing_note) = self.target_guids.get(¬e.guid) { - if existing_note.mtime < note.mtime { - if existing_note.notetype_id != note.notetype_id - || remapped_notetype_id.is_some() - { - // Existing GUID with different notetype id, or changed notetype schema - self.imports.log_conflicting(note); - } else { - self.update_note(note, existing_note.id)?; - } - } else { - self.imports.log_duplicate(note, existing_note.id); - } + self.maybe_update_existing_note(*existing_note, note)?; } else { - if let Some(remapped_ntid) = remapped_notetype_id { - // Notetypes have diverged, but this is a new note, so we can import - // with a new notetype id. 
- note.notetype_id = *remapped_ntid; - } self.add_note(note)?; } } @@ -197,6 +331,29 @@ impl<'n> NoteContext<'n> { Ok(()) } + fn remap_notetype_and_fields(&mut self, note: &mut Note) { + if let Some(new_ords) = self.remapped_fields.get(¬e.notetype_id) { + note.reorder_fields(new_ords); + } + if let Some(remapped_ntid) = self.remapped_notetypes.get(¬e.notetype_id) { + note.notetype_id = *remapped_ntid; + } + } + + fn maybe_update_existing_note(&mut self, existing: NoteMeta, incoming: Note) -> Result<()> { + if incoming.notetype_id != existing.notetype_id { + // notetype of existing note has changed, or notetype of incoming note has been + // remapped due to a schema conflict + self.imports.log_conflicting(incoming); + } else if should_update(self.update_notes, existing.mtime, incoming.mtime) { + self.update_note(incoming, existing.id)?; + } else { + // TODO: might still want to update merged in fields + self.imports.log_duplicate(incoming, existing.id); + } + Ok(()) + } + fn add_note(&mut self, mut note: Note) -> Result<()> { self.munge_media(&mut note)?; self.target_col.canonify_note_tags(&mut note, self.usn)?; @@ -274,23 +431,74 @@ impl<'n> NoteContext<'n> { } } +fn should_update( + cond: UpdateCondition, + existing_mtime: TimestampSecs, + incoming_mtime: TimestampSecs, +) -> bool { + match cond { + UpdateCondition::IfNewer => existing_mtime < incoming_mtime, + UpdateCondition::Always => existing_mtime != incoming_mtime, + UpdateCondition::Never => false, + } +} + impl Notetype { - fn schema_hash(&self) -> Sha1Hash { - let mut hasher = Sha1::new(); - for field in &self.fields { - hasher.update(field.name.as_bytes()); + pub(crate) fn field_ords(&self) -> impl Iterator> + '_ { + self.fields.iter().map(|f| f.ord) + } + + pub(crate) fn template_ords(&self) -> impl Iterator> + '_ { + self.templates.iter().map(|t| t.ord) + } + + fn field_ords_vec(&self) -> Vec> { + self.field_ords() + .map(|opt| opt.map(|u| u as usize)) + .collect() + } + + fn template_ords_vec(&self) -> Vec> { + self.template_ords() + .map(|opt| opt.map(|u| u as usize)) + .collect() + } + + fn equal_schema(&self, other: &Self) -> bool { + self.fields.len() == other.fields.len() + && self.templates.len() == other.templates.len() + && self + .fields + .iter() + .zip(other.fields.iter()) + .all(|(f1, f2)| f1.is_match(f2)) + && self + .templates + .iter() + .zip(other.templates.iter()) + .all(|(t1, t2)| t1.is_match(t2)) + } + + fn copy_ords(&mut self, other: &Self) { + for (field, other_ord) in self.fields.iter_mut().zip(other.field_ords()) { + field.ord = other_ord; } - for template in &self.templates { - hasher.update(template.name.as_bytes()); + for (template, other_ord) in self.templates.iter_mut().zip(other.template_ords()) { + template.ord = other_ord; } - hasher.finalize().into() } } #[cfg(test)] mod test { + use anki_proto::import_export::ImportAnkiPackageOptions; + use tempfile::TempDir; + use super::*; + use crate::collection::CollectionBuilder; use crate::import_export::package::media::SafeMediaEntry; + use crate::notetype::CardTemplate; + use crate::notetype::NoteField; /// Import [Note] into [Collection], optionally taking a [MediaUseMap], /// or a [Notetype] remapping. 
@@ -298,14 +506,30 @@ mod test { ($col:expr, $note:expr, $old_notetype:expr => $new_notetype:expr) => {{ let mut media_map = MediaUseMap::default(); let mut progress = $col.new_progress_handler(); - let mut ctx = NoteContext::new(Usn(1), &mut $col, &mut media_map).unwrap(); + let mut ctx = NoteContext::new( + Usn(1), + &mut $col, + &mut media_map, + false, + UpdateCondition::IfNewer, + UpdateCondition::IfNewer, + ) + .unwrap(); ctx.remapped_notetypes.insert($old_notetype, $new_notetype); ctx.import_notes(vec![$note], &mut progress).unwrap(); ctx.imports.log }}; ($col:expr, $note:expr, $media_map:expr) => {{ let mut progress = $col.new_progress_handler(); - let mut ctx = NoteContext::new(Usn(1), &mut $col, &mut $media_map).unwrap(); + let mut ctx = NoteContext::new( + Usn(1), + &mut $col, + &mut $media_map, + false, + UpdateCondition::IfNewer, + UpdateCondition::IfNewer, + ) + .unwrap(); ctx.import_notes(vec![$note], &mut progress).unwrap(); ctx.imports.log }}; @@ -327,6 +551,38 @@ mod test { }; } + struct Remappings { + remapped_notetypes: HashMap, + remapped_fields: HashMap>>, + remapped_templates: HashMap, + } + + /// Imports the notetype into the collection, and returns its remapped id if + /// any. + macro_rules! import_notetype { + ($col:expr, $notetype:expr) => {{ + import_notetype!($col, $notetype, merge = false) + }}; + ($col:expr, $notetype:expr, merge = $merge:expr) => {{ + let mut media_map = MediaUseMap::default(); + let mut ctx = NoteContext::new( + Usn(1), + $col, + &mut media_map, + $merge, + UpdateCondition::IfNewer, + UpdateCondition::IfNewer, + ) + .unwrap(); + ctx.import_notetypes(vec![$notetype]).unwrap(); + Remappings { + remapped_notetypes: ctx.remapped_notetypes, + remapped_fields: ctx.remapped_fields, + remapped_templates: ctx.imports.remapped_templates, + } + }}; + } + impl Collection { fn note_id_for_guid(&self, guid: &str) -> NoteId { self.storage @@ -336,6 +592,16 @@ mod test { } } + impl Notetype { + pub(crate) fn field_names(&self) -> impl Iterator { + self.fields.iter().map(|f| &f.name) + } + + pub(crate) fn template_names(&self) -> impl Iterator { + self.templates.iter().map(|t| &t.name) + } + } + #[test] fn should_add_note_with_new_id_if_guid_is_unique_and_id_is_not() { let mut col = Collection::new(); @@ -403,11 +669,10 @@ mod test { let mut col = Collection::new(); let basic_ntid = col.get_notetype_by_name("basic").unwrap().unwrap().id; let mut note = NoteAdder::basic(&mut col).add(&mut col); - note.notetype_id.0 = 123; note.mtime.0 += 1; note.fields_mut()[0] = "updated".to_string(); - let mut log = import_note!(col, note, NotetypeId(123) => basic_ntid); + let mut log = import_note!(col, note, basic_ntid => NotetypeId(123)); assert_eq!(col.get_all_notes()[0].fields()[0], ""); assert_note_logged!(log, conflicting, &["updated", ""]); } @@ -426,4 +691,205 @@ mod test { assert_eq!(col.get_all_notes()[0].fields()[0], ""); assert_note_logged!(log, new, &[" bar.jpg ", ""]); } + + #[test] + fn should_import_new_notetype() { + let mut col = Collection::new(); + let mut new_basic = crate::notetype::stock::basic(&col.tr); + new_basic.id.0 = 123; + import_notetype!(&mut col, new_basic); + assert!(col.storage.get_notetype(NotetypeId(123)).unwrap().is_some()); + } + + #[test] + fn should_update_existing_notetype_with_older_mtime_and_matching_schema() { + let mut col = Collection::new(); + let mut basic = col.basic_notetype(); + basic.mtime_secs.0 += 1; + basic.name = String::from("new"); + import_notetype!(&mut col, basic); + 
assert!(col.get_notetype_by_name("new").unwrap().is_some()); + } + + #[test] + fn should_not_update_existing_notetype_with_newer_mtime_and_matching_schema() { + let mut col = Collection::new(); + let mut basic = col.basic_notetype(); + basic.mtime_secs.0 -= 1; + basic.name = String::from("new"); + import_notetype!(&mut col, basic); + assert!(col.get_notetype_by_name("new").unwrap().is_none()); + } + + #[test] + fn should_rename_field_with_matching_id_without_schema_change() { + let mut col = Collection::new(); + let mut to_import = col.basic_notetype(); + to_import.fields[0].name = String::from("renamed"); + to_import.mtime_secs.0 += 1; + import_notetype!(&mut col, to_import); + assert_eq!(col.basic_notetype().fields[0].name, "renamed"); + } + + #[test] + fn should_add_remapped_notetype_if_schema_has_changed_and_reuse_it_subsequently() { + let mut col = Collection::new(); + let mut to_import = col.basic_notetype(); + to_import.fields[0].name = String::from("new field"); + // clear id or schemas would still match + to_import.fields[0].config.id.take(); + + // schema mismatch => notetype should be imported with new id + let out = import_notetype!(&mut col, to_import.clone()); + let remapped_id = *out.remapped_notetypes.values().next().unwrap(); + assert_eq!(col.basic_notetype().fields[0].name, "Front"); + let remapped = col.storage.get_notetype(remapped_id).unwrap().unwrap(); + assert_eq!(remapped.fields[0].name, "new field"); + + // notetype with matching schema and original id exists => should be reused + to_import.name = String::from("new name"); + to_import.mtime_secs.0 = remapped.mtime_secs.0 + 1; + let out_2 = import_notetype!(&mut col, to_import); + let remapped_id_2 = *out_2.remapped_notetypes.values().next().unwrap(); + assert_eq!(remapped_id, remapped_id_2); + let updated = col.storage.get_notetype(remapped_id).unwrap().unwrap(); + assert_eq!(updated.name, "new name"); + } + + #[test] + fn should_merge_notetype_fields() { + let mut col = Collection::new(); + let mut to_import = col.basic_notetype(); + to_import.mtime_secs.0 += 1; + to_import.fields.remove(0); + to_import.fields[0].name = String::from("renamed"); + to_import.fields[0].ord.replace(0); + to_import.fields.push(NoteField::new("new")); + to_import.fields[1].ord.replace(1); + + let fields = import_notetype!(&mut col, to_import.clone(), merge = true).remapped_fields; + // Front field is preserved and new field added + assert!(col + .basic_notetype() + .field_names() + .eq(["Front", "renamed", "new"])); + // extra field must be inserted into incoming notes + assert_eq!( + fields.get(&to_import.id).unwrap(), + &[None, Some(0), Some(1)] + ); + } + + #[test] + fn should_merge_notetype_templates() { + let mut col = Collection::new(); + let mut to_import = col.basic_rev_notetype(); + to_import.mtime_secs.0 += 1; + to_import.templates.remove(0); + to_import.templates[0].name = String::from("renamed"); + to_import.templates[0].ord.replace(0); + to_import.templates.push(CardTemplate::new("new", "", "")); + to_import.templates[1].ord.replace(1); + + let templates = + import_notetype!(&mut col, to_import.clone(), merge = true).remapped_templates; + // Card 1 is preserved and new template added + assert!(col + .basic_rev_notetype() + .template_names() + .eq(["Card 1", "renamed", "new"])); + // templates must be shifted accordingly + let map = templates.get(&to_import.id).unwrap(); + assert_eq!(map.get(&0), Some(&1)); + assert_eq!(map.get(&1), Some(&2)); + } + + #[test] + fn should_merge_notetype_duplicates_from_previous_imports() { + 
let mut col = Collection::new(); + let mut incoming = col.basic_notetype(); + incoming.fields.push(NoteField::new("new incoming")); + // simulate a notetype duplicated during previous import + let mut remapped = col.basic_notetype(); + remapped.config.original_id.replace(incoming.id.0); + // ... which was modified and has notes + remapped.fields.push(NoteField::new("new remapped")); + remapped.id.0 = 0; + col.add_notetype_inner(&mut remapped, Usn(0), true).unwrap(); + let mut note = Note::new(&remapped); + *note.fields_mut() = vec![ + String::from("front"), + String::from("back"), + String::from("new"), + ]; + col.add_note(&mut note, DeckId(1)).unwrap(); + + let ntid = incoming.id; + import_notetype!(&mut col, incoming, merge = true); + + // both notetypes should have been merged into it + assert!(col.get_notetype(ntid).unwrap().unwrap().field_names().eq([ + "Front", + "Back", + "new remapped", + "new incoming", + ])); + assert!(col.get_all_notes()[0] + .fields() + .iter() + .eq(["front", "back", "new", ""])) + } + + #[test] + fn reimport_with_merge_enabled_should_handle_duplicates() -> Result<()> { + // import from src to dst + let mut src = Collection::new(); + NoteAdder::basic(&mut src) + .fields(&["foo", "bar"]) + .add(&mut src); + let temp_dir = TempDir::new()?; + let path = temp_dir.path().join("foo.apkg"); + src.export_apkg(&path, "", false, false, false, None)?; + + let mut dst = CollectionBuilder::new(temp_dir.path().join("dst.anki2")) + .with_desktop_media_paths() + .build()?; + dst.import_apkg(&path, ImportAnkiPackageOptions::default())?; + + // add a field to src + let mut nt = src.basic_notetype(); + nt.fields.push(NoteField::new("new incoming")); + src.update_notetype(&mut nt, false)?; + + // importing again with merge enabled will fail, and add an empty notetype + assert_eq!(dst.storage.get_all_notetype_names().unwrap().len(), 6); + src.export_apkg(&path, "", false, false, false, None)?; + assert_eq!( + dst.import_apkg(&path, ImportAnkiPackageOptions::default())? + .output + .conflicting + .len(), + 1 + ); + assert_eq!(dst.storage.get_all_notetype_names().unwrap().len(), 7); + + // if enabling merge, it should succeed and remove the empty notetype + src.export_apkg(&path, "", false, false, false, None)?; + assert_eq!( + dst.import_apkg( + &path, + ImportAnkiPackageOptions { + merge_notetypes: true, + ..Default::default() + } + )? 
+ .output + .conflicting + .len(), + 0 + ); + assert_eq!(dst.storage.get_all_notetype_names().unwrap().len(), 6); + + Ok(()) + } } diff --git a/rslib/src/import_export/package/apkg/tests.rs b/rslib/src/import_export/package/apkg/tests.rs index 2553334a1..47b322722 100644 --- a/rslib/src/import_export/package/apkg/tests.rs +++ b/rslib/src/import_export/package/apkg/tests.rs @@ -8,6 +8,7 @@ use std::fs::File; use std::io::Write; use anki_io::read_file; +use anki_proto::import_export::ImportAnkiPackageOptions; use crate::media::files::sha1_of_data; use crate::media::MediaManager; @@ -50,7 +51,9 @@ fn roundtrip_inner(legacy: bool) { None, ) .unwrap(); - target_col.import_apkg(&apkg_path).unwrap(); + target_col + .import_apkg(&apkg_path, ImportAnkiPackageOptions::default()) + .unwrap(); target_col.assert_decks(); target_col.assert_notetype(¬etype); diff --git a/rslib/src/import_export/package/mod.rs b/rslib/src/import_export/package/mod.rs index 823cf0633..b99a38ddf 100644 --- a/rslib/src/import_export/package/mod.rs +++ b/rslib/src/import_export/package/mod.rs @@ -7,6 +7,8 @@ mod media; mod meta; use anki_proto::import_export::media_entries::MediaEntry; +pub use anki_proto::import_export::ImportAnkiPackageOptions; +pub use anki_proto::import_export::ImportAnkiPackageUpdateCondition as UpdateCondition; use anki_proto::import_export::MediaEntries; pub(crate) use apkg::NoteMeta; pub(crate) use colpkg::export::export_colpkg_from_data; diff --git a/rslib/src/import_export/service.rs b/rslib/src/import_export/service.rs index 2fe89de4f..c51c59906 100644 --- a/rslib/src/import_export/service.rs +++ b/rslib/src/import_export/service.rs @@ -4,23 +4,33 @@ use anki_proto::generic; use anki_proto::import_export::import_response::Log as NoteLog; use anki_proto::import_export::ExportLimit; -use crate::collection::Collection; -use crate::error; -use crate::ops::OpOutput; +use crate::prelude::*; use crate::search::SearchNode; impl crate::services::ImportExportService for Collection { fn import_anki_package( &mut self, input: anki_proto::import_export::ImportAnkiPackageRequest, - ) -> error::Result { - self.import_apkg(&input.package_path).map(Into::into) + ) -> Result { + self.import_apkg(&input.package_path, input.options.unwrap_or_default()) + .map(Into::into) + } + + fn get_import_anki_package_presets( + &mut self, + ) -> Result { + Ok(anki_proto::import_export::ImportAnkiPackageOptions { + merge_notetypes: self.get_config_bool(BoolKey::MergeNotetypes), + with_scheduling: self.get_config_bool(BoolKey::WithScheduling), + update_notes: self.get_update_notes() as i32, + update_notetypes: self.get_update_notetypes() as i32, + }) } fn export_anki_package( &mut self, input: anki_proto::import_export::ExportAnkiPackageRequest, - ) -> error::Result { + ) -> Result { self.export_apkg( &input.out_path, SearchNode::from(input.limit.unwrap_or_default()), @@ -35,7 +45,7 @@ impl crate::services::ImportExportService for Collection { fn get_csv_metadata( &mut self, input: anki_proto::import_export::CsvMetadataRequest, - ) -> error::Result { + ) -> Result { let delimiter = input.delimiter.is_some().then(|| input.delimiter()); self.get_csv_metadata( @@ -50,7 +60,7 @@ impl crate::services::ImportExportService for Collection { fn import_csv( &mut self, input: anki_proto::import_export::ImportCsvRequest, - ) -> error::Result { + ) -> Result { self.import_csv(&input.path, input.metadata.unwrap_or_default()) .map(Into::into) } @@ -58,14 +68,14 @@ impl crate::services::ImportExportService for Collection { fn export_note_csv( &mut 
self, input: anki_proto::import_export::ExportNoteCsvRequest, - ) -> error::Result { + ) -> Result { self.export_note_csv(input).map(Into::into) } fn export_card_csv( &mut self, input: anki_proto::import_export::ExportCardCsvRequest, - ) -> error::Result { + ) -> Result { self.export_card_csv( &input.out_path, SearchNode::from(input.limit.unwrap_or_default()), @@ -77,14 +87,14 @@ impl crate::services::ImportExportService for Collection { fn import_json_file( &mut self, input: generic::String, - ) -> error::Result { + ) -> Result { self.import_json_file(&input.val).map(Into::into) } fn import_json_string( &mut self, input: generic::String, - ) -> error::Result { + ) -> Result { self.import_json_string(&input.val).map(Into::into) } } diff --git a/rslib/src/media/check.rs b/rslib/src/media/check.rs index b7e6d38b6..13130f344 100644 --- a/rslib/src/media/check.rs +++ b/rslib/src/media/check.rs @@ -350,9 +350,12 @@ impl MediaChecker<'_> { for nid in nids { self.increment_progress()?; let mut note = self.col.storage.get_note(nid)?.unwrap(); - let nt = notetypes.get(¬e.notetype_id).ok_or_else(|| { - AnkiError::db_error("missing note type", DbErrorKind::MissingEntity) - })?; + let nt = notetypes + .iter() + .find(|nt| nt.id == note.notetype_id) + .ok_or_else(|| { + AnkiError::db_error("missing note type", DbErrorKind::MissingEntity) + })?; let mut tracker = |fname| { referenced_files .entry(fname) diff --git a/rslib/src/notetype/checks.rs b/rslib/src/notetype/checks.rs index 8a74e5d6f..b53ed3c3d 100644 --- a/rslib/src/notetype/checks.rs +++ b/rslib/src/notetype/checks.rs @@ -31,7 +31,7 @@ lazy_static! { impl Collection { pub fn report_media_field_referencing_templates(&mut self, buf: &mut String) -> Result<()> { let notetypes = self.get_all_notetypes()?; - let templates = media_field_referencing_templates(notetypes.values().map(Deref::deref)); + let templates = media_field_referencing_templates(notetypes.iter().map(Deref::deref)); write_template_report(buf, &templates, &self.tr); Ok(()) } diff --git a/rslib/src/notetype/emptycards.rs b/rslib/src/notetype/emptycards.rs index ab875622b..eb26e80c8 100644 --- a/rslib/src/notetype/emptycards.rs +++ b/rslib/src/notetype/emptycards.rs @@ -77,7 +77,7 @@ impl Collection { let mut buf = String::new(); for (ntid, notes) in empty { if !notes.is_empty() { - let nt = nts.get(ntid).unwrap(); + let nt = nts.iter().find(|nt| nt.id == *ntid).unwrap(); write!( buf, "
{}
    ", diff --git a/rslib/src/notetype/fields.rs b/rslib/src/notetype/fields.rs index ec1a4c875..6aaebb784 100644 --- a/rslib/src/notetype/fields.rs +++ b/rslib/src/notetype/fields.rs @@ -38,6 +38,7 @@ impl NoteField { ord: None, name: name.into(), config: NoteFieldConfig { + id: Some(rand::random()), sticky: false, rtl: false, plain_text: false, diff --git a/rslib/src/notetype/merge.rs b/rslib/src/notetype/merge.rs new file mode 100644 index 000000000..f88be64ab --- /dev/null +++ b/rslib/src/notetype/merge.rs @@ -0,0 +1,170 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use super::CardTemplate; +use crate::notetype::NoteField; +use crate::prelude::*; + +impl Notetype { + /// Inserts not yet existing fields ands templates from `other`. + pub(crate) fn merge(&mut self, other: &Self) { + self.merge_fields(other); + self.merge_templates(other); + } + + pub(crate) fn merge_all<'a>(&mut self, others: impl IntoIterator) { + for other in others { + self.merge(other); + } + } + + /// Inserts not yet existing fields from `other`. + fn merge_fields(&mut self, other: &Self) { + for (index, field) in other.fields.iter().enumerate() { + match self.find_field(field) { + Some(i) if i == index => (), + Some(i) => self.fields.swap(i, index), + None => { + let mut missing = field.clone(); + missing.ord.take(); + self.fields.insert(index, missing); + } + } + } + } + + fn find_field(&self, like: &NoteField) -> Option { + self.fields + .iter() + .enumerate() + .find_map(|(i, f)| f.is_match(like).then_some(i)) + } + + /// Inserts not yet existing templates from `other`. + fn merge_templates(&mut self, other: &Self) { + for (index, template) in other.templates.iter().enumerate() { + match self.find_template(template) { + Some(i) if i == index => (), + Some(i) => self.templates.swap(i, index), + None => { + let mut missing = template.clone(); + missing.ord.take(); + self.templates.insert(index, missing); + } + } + } + } + + fn find_template(&self, like: &CardTemplate) -> Option { + self.templates + .iter() + .enumerate() + .find_map(|(i, t)| t.is_match(like).then_some(i)) + } +} + +impl NoteField { + /// True if both ids are identical, but not [None], or at least one id is + /// [None] and the names are identical. + pub(crate) fn is_match(&self, other: &Self) -> bool { + if let (Some(id), Some(other_id)) = (self.config.id, other.config.id) { + id == other_id + } else { + self.name == other.name + } + } +} + +impl CardTemplate { + /// True if both ids are identical, but not [None], or at least one id is + /// [None] and the names are identical. 
+ pub(crate) fn is_match(&self, other: &Self) -> bool { + if let (Some(id), Some(other_id)) = (self.config.id, other.config.id) { + id == other_id + } else { + self.name == other.name + } + } +} + +#[cfg(test)] +mod test { + use itertools::assert_equal; + + use super::*; + use crate::notetype::stock; + + impl Notetype { + fn field_ids(&self) -> impl Iterator> + '_ { + self.fields.iter().map(|field| field.config.id) + } + + fn template_ids(&self) -> impl Iterator> + '_ { + self.templates.iter().map(|template| template.config.id) + } + } + + #[test] + fn merge_new_fields() { + let mut basic = stock::basic(&I18n::template_only()); + let mut other = basic.clone(); + other.add_field("with id"); + other.add_field("without id"); + other.fields[3].config.id.take(); + basic.merge(&other); + assert_equal(basic.field_ids(), other.field_ids()); + assert_equal(basic.field_names(), other.field_names()); + } + + #[test] + fn skip_merging_field_with_existing_id() { + let mut basic = stock::basic(&I18n::template_only()); + let mut other = basic.clone(); + other.fields[1].name = String::from("renamed"); + basic.merge(&other); + assert_equal(basic.field_ids(), other.field_ids()); + assert_equal(basic.field_names(), ["Front", "Back"].iter()); + } + + #[test] + fn align_field_order() { + let mut basic = stock::basic(&I18n::template_only()); + let mut other = basic.clone(); + other.fields.swap(0, 1); + basic.merge(&other); + assert_equal(basic.field_ids(), other.field_ids()); + assert_equal(basic.field_names(), other.field_names()); + } + + #[test] + fn merge_new_templates() { + let mut basic = stock::basic(&I18n::template_only()); + let mut other = basic.clone(); + other.add_template("with id", "", ""); + other.add_template("without id", "", ""); + other.templates[2].config.id.take(); + basic.merge(&other); + assert_equal(basic.template_ids(), other.template_ids()); + assert_equal(basic.template_names(), other.template_names()); + } + + #[test] + fn skip_merging_template_with_existing_id() { + let mut basic = stock::basic(&I18n::template_only()); + let mut other = basic.clone(); + other.templates[0].name = String::from("renamed"); + basic.merge(&other); + assert_equal(basic.template_ids(), other.template_ids()); + assert_equal(basic.template_names(), std::iter::once("Card 1")); + } + + #[test] + fn align_template_order() { + let mut basic_rev = stock::basic_forward_reverse(&I18n::template_only()); + let mut other = basic_rev.clone(); + other.templates.swap(0, 1); + basic_rev.merge(&other); + assert_equal(basic_rev.template_ids(), other.template_ids()); + assert_equal(basic_rev.template_names(), other.template_names()); + } +} diff --git a/rslib/src/notetype/mod.rs b/rslib/src/notetype/mod.rs index d2f3f2af6..547999972 100644 --- a/rslib/src/notetype/mod.rs +++ b/rslib/src/notetype/mod.rs @@ -5,6 +5,7 @@ mod cardgen; mod checks; mod emptycards; mod fields; +mod merge; mod notetypechange; mod render; mod restore; @@ -213,16 +214,11 @@ impl Collection { } } - pub fn get_all_notetypes(&mut self) -> Result>> { + pub fn get_all_notetypes(&mut self) -> Result>> { self.storage - .get_all_notetype_names()? + .get_all_notetype_ids()? 
.into_iter() - .map(|(ntid, _)| { - self.get_notetype(ntid) - .transpose() - .unwrap() - .map(|nt| (ntid, nt)) - }) + .filter_map(|ntid| self.get_notetype(ntid).transpose()) .collect() } @@ -718,7 +714,7 @@ impl Collection { Ok(()) } - fn remove_notetype_inner(&mut self, ntid: NotetypeId) -> Result<()> { + pub(crate) fn remove_notetype_inner(&mut self, ntid: NotetypeId) -> Result<()> { let notetype = if let Some(notetype) = self.storage.get_notetype(ntid)? { notetype } else { diff --git a/rslib/src/notetype/notetypechange.rs b/rslib/src/notetype/notetypechange.rs index 5bc7e43f6..35ca21e75 100644 --- a/rslib/src/notetype/notetypechange.rs +++ b/rslib/src/notetype/notetypechange.rs @@ -209,7 +209,10 @@ fn default_field_map(current_notetype: &Notetype, new_notetype: &Notetype) -> Ve } impl Collection { - fn change_notetype_of_notes_inner(&mut self, input: ChangeNotetypeInput) -> Result<()> { + pub(crate) fn change_notetype_of_notes_inner( + &mut self, + input: ChangeNotetypeInput, + ) -> Result<()> { require!( input.current_schema == self.storage.get_collection_timestamps()?.schema_change, "schema changed" diff --git a/rslib/src/notetype/schema11.rs b/rslib/src/notetype/schema11.rs index ec61550ac..669af7ab7 100644 --- a/rslib/src/notetype/schema11.rs +++ b/rslib/src/notetype/schema11.rs @@ -64,6 +64,8 @@ pub struct NotetypeSchema11 { pub(crate) req: CardRequirementsSchema11, #[serde(default, skip_serializing_if = "is_default")] pub(crate) original_stock_kind: i32, + #[serde(default, skip_serializing_if = "is_default")] + pub(crate) original_id: Option, #[serde(flatten)] pub(crate) other: HashMap, } @@ -109,6 +111,7 @@ impl From for Notetype { latex_svg: nt.latexsvg, reqs: nt.req.0.into_iter().map(Into::into).collect(), original_stock_kind: nt.original_stock_kind, + original_id: nt.original_id, other: other_to_bytes(&nt.other), }, fields: nt.flds.into_iter().map(Into::into).collect(), @@ -172,6 +175,7 @@ impl From for NotetypeSchema11 { latexsvg: c.latex_svg, req: CardRequirementsSchema11(c.reqs.into_iter().map(Into::into).collect()), original_stock_kind: c.original_stock_kind, + original_id: c.original_id, other: parse_other_fields(&c.other, &RESERVED_NOTETYPE_KEYS), } } @@ -249,6 +253,9 @@ pub struct NoteFieldSchema11 { #[serde(default, deserialize_with = "default_on_invalid")] pub(crate) exclude_from_search: bool, + #[serde(default, deserialize_with = "default_on_invalid")] + pub(crate) id: Option, + #[serde(flatten)] pub(crate) other: HashMap, } @@ -266,6 +273,7 @@ impl Default for NoteFieldSchema11 { description: String::new(), collapsed: false, exclude_from_search: false, + id: None, other: Default::default(), } } @@ -285,6 +293,7 @@ impl From for NoteField { description: f.description, collapsed: f.collapsed, exclude_from_search: f.exclude_from_search, + id: f.id, other: other_to_bytes(&f.other), }, } @@ -305,6 +314,7 @@ impl From for NoteFieldSchema11 { description: conf.description, collapsed: conf.collapsed, exclude_from_search: conf.exclude_from_search, + id: conf.id, other: parse_other_fields(&conf.other, &RESERVED_FIELD_KEYS), } } @@ -321,6 +331,7 @@ static RESERVED_FIELD_KEYS: Set<&'static str> = phf_set! 
{ "collapsed", "description", "excludeFromSearch", + "id", }; #[derive(Serialize, Deserialize, Debug, Default, Clone)] @@ -340,6 +351,8 @@ pub struct CardTemplateSchema11 { pub(crate) bfont: String, #[serde(default, deserialize_with = "default_on_invalid")] pub(crate) bsize: u8, + #[serde(default, deserialize_with = "default_on_invalid")] + pub(crate) id: Option, #[serde(flatten)] pub(crate) other: HashMap, } @@ -359,6 +372,7 @@ impl From for CardTemplate { target_deck_id: t.did.unwrap_or(DeckId(0)).0, browser_font_name: t.bfont, browser_font_size: t.bsize as u32, + id: t.id, other: other_to_bytes(&t.other), }, } @@ -382,6 +396,7 @@ impl From for CardTemplateSchema11 { }, bfont: conf.browser_font_name, bsize: conf.browser_font_size as u8, + id: conf.id, other: parse_other_fields(&conf.other, &RESERVED_TEMPLATE_KEYS), } } @@ -397,6 +412,7 @@ static RESERVED_TEMPLATE_KEYS: Set<&'static str> = phf_set! { "bqfmt", "bfont", "bsize", + "id", }; #[cfg(test)] diff --git a/rslib/src/notetype/schemachange.rs b/rslib/src/notetype/schemachange.rs index 3013e48e2..c253afd62 100644 --- a/rslib/src/notetype/schemachange.rs +++ b/rslib/src/notetype/schemachange.rs @@ -4,6 +4,7 @@ //! Updates to notes/cards when the structure of a notetype is changed. use std::collections::HashMap; +use std::mem; use super::CardGenContext; use super::CardTemplate; @@ -102,20 +103,7 @@ impl Collection { for nid in nids { let mut note = self.storage.get_note(nid)?.unwrap(); let original = note.clone(); - *note.fields_mut() = ords - .iter() - .map(|f| { - if let Some(idx) = f { - note.fields() - .get(*idx as usize) - .map(AsRef::as_ref) - .unwrap_or("") - } else { - "" - } - }) - .map(Into::into) - .collect(); + note.reorder_fields(&ords); self.update_note_inner_without_cards( &mut note, &original, @@ -192,6 +180,19 @@ impl Notetype { } } +impl Note { + pub(crate) fn reorder_fields(&mut self, new_ords: &[Option]) { + *self.fields_mut() = new_ords + .iter() + .map(|ord| { + ord.and_then(|idx| self.fields_mut().get_mut(idx as usize)) + .map(mem::take) + .unwrap_or_default() + }) + .collect(); + } +} + #[cfg(test)] mod test { use super::*; diff --git a/rslib/src/notetype/templates.rs b/rslib/src/notetype/templates.rs index b75fbb8e0..aba9afb7a 100644 --- a/rslib/src/notetype/templates.rs +++ b/rslib/src/notetype/templates.rs @@ -86,6 +86,7 @@ impl CardTemplate { mtime_secs: TimestampSecs(0), usn: Usn(0), config: CardTemplateConfig { + id: Some(rand::random()), q_format: qfmt.into(), a_format: afmt.into(), q_format_browser: "".into(), diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs index d13374fa7..9808b5d64 100644 --- a/rslib/src/search/sqlwriter.rs +++ b/rslib/src/search/sqlwriter.rs @@ -608,11 +608,10 @@ impl SqlWriter<'_> { &mut self, field_name: &str, ) -> Result> { - let notetypes = self.col.get_all_notetypes()?; let matches_glob = glob_matcher(field_name); let mut field_map = vec![]; - for nt in notetypes.values() { + for nt in self.col.get_all_notetypes()? { let matched_fields = nt .fields .iter() @@ -639,11 +638,10 @@ impl SqlWriter<'_> { &mut self, field_name: &str, ) -> Result)>> { - let notetypes = self.col.get_all_notetypes()?; let matches_glob = glob_matcher(field_name); let mut field_map = vec![]; - for nt in notetypes.values() { + for nt in self.col.get_all_notetypes()? 
diff --git a/rslib/src/notetype/templates.rs b/rslib/src/notetype/templates.rs
index b75fbb8e0..aba9afb7a 100644
--- a/rslib/src/notetype/templates.rs
+++ b/rslib/src/notetype/templates.rs
@@ -86,6 +86,7 @@ impl CardTemplate {
             mtime_secs: TimestampSecs(0),
             usn: Usn(0),
             config: CardTemplateConfig {
+                id: Some(rand::random()),
                 q_format: qfmt.into(),
                 a_format: afmt.into(),
                 q_format_browser: "".into(),
diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs
index d13374fa7..9808b5d64 100644
--- a/rslib/src/search/sqlwriter.rs
+++ b/rslib/src/search/sqlwriter.rs
@@ -608,11 +608,10 @@ impl SqlWriter<'_> {
         &mut self,
         field_name: &str,
     ) -> Result> {
-        let notetypes = self.col.get_all_notetypes()?;
         let matches_glob = glob_matcher(field_name);
         let mut field_map = vec![];
-        for nt in notetypes.values() {
+        for nt in self.col.get_all_notetypes()? {
             let matched_fields = nt
                 .fields
                 .iter()
@@ -639,11 +638,10 @@ impl SqlWriter<'_> {
         &mut self,
         field_name: &str,
     ) -> Result)>> {
-        let notetypes = self.col.get_all_notetypes()?;
         let matches_glob = glob_matcher(field_name);
         let mut field_map = vec![];
-        for nt in notetypes.values() {
+        for nt in self.col.get_all_notetypes()? {
             let matched_fields: Vec = nt
                 .fields
                 .iter()
@@ -663,10 +661,9 @@ impl SqlWriter<'_> {
     }
 
     fn included_fields_by_notetype(&mut self) -> Result>> {
-        let notetypes = self.col.get_all_notetypes()?;
         let mut any_excluded = false;
         let mut field_map = vec![];
-        for nt in notetypes.values() {
+        for nt in self.col.get_all_notetypes()? {
             let mut sortf_excluded = false;
             let matched_fields = nt
                 .fields
@@ -699,10 +696,9 @@ impl SqlWriter<'_> {
     fn included_fields_for_unqualified_regex(
         &mut self,
     ) -> Result>> {
-        let notetypes = self.col.get_all_notetypes()?;
         let mut any_excluded = false;
         let mut field_map = vec![];
-        for nt in notetypes.values() {
+        for nt in self.col.get_all_notetypes()? {
             let matched_fields: Vec = nt
                 .fields
                 .iter()
diff --git a/rslib/src/storage/notetype/mod.rs b/rslib/src/storage/notetype/mod.rs
index 30625eaff..7e3821b30 100644
--- a/rslib/src/storage/notetype/mod.rs
+++ b/rslib/src/storage/notetype/mod.rs
@@ -136,6 +136,13 @@ impl SqliteStorage {
             .collect()
     }
 
+    pub fn get_all_notetype_ids(&self) -> Result<Vec<NotetypeId>> {
+        self.db
+            .prepare_cached("SELECT id FROM notetypes")?
+            .query_and_then([], |row| row.get(0).map_err(Into::into))?
+            .collect()
+    }
+
     /// Returns list of (id, name, use_count)
     pub fn get_notetype_use_counts(&self) -> Result<Vec<(NotetypeId, String, u32)>> {
         self.db
diff --git a/rslib/src/tests.rs b/rslib/src/tests.rs
index de3251749..033d7a69b 100644
--- a/rslib/src/tests.rs
+++ b/rslib/src/tests.rs
@@ -99,6 +99,20 @@ impl Collection {
         self.adjust_remaining_steps_in_deck(DeckId(1), Some(&config), Some(&new_config), Usn(0))
             .unwrap();
     }
+
+    pub(crate) fn basic_notetype(&self) -> Notetype {
+        let ntid = self.storage.get_notetype_id("Basic").unwrap().unwrap();
+        self.storage.get_notetype(ntid).unwrap().unwrap()
+    }
+
+    pub(crate) fn basic_rev_notetype(&self) -> Notetype {
+        let ntid = self
+            .storage
+            .get_notetype_id("Basic (and reversed card)")
+            .unwrap()
+            .unwrap();
+        self.storage.get_notetype(ntid).unwrap().unwrap()
+    }
 }
 
 #[derive(Debug, Default, Clone)]
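The new `SqliteStorage::get_all_notetype_ids()` above follows the storage layer's usual shape: a cached statement, `query_and_then`, and a collect straight into `Result<Vec<_>>`. A roughly equivalent stand-alone sketch against an in-memory database (assumes the `rusqlite` crate; Anki's own error and id types are replaced here with plain `rusqlite::Result` and `i64`):

```rust
use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let db = Connection::open_in_memory()?;
    db.execute_batch(
        "CREATE TABLE notetypes (id INTEGER PRIMARY KEY, name TEXT);
         INSERT INTO notetypes (id, name) VALUES (1, 'Basic'), (2, 'Cloze');",
    )?;
    // prepare_cached + query_and_then, collected directly into Result<Vec<_>>
    // so the first row error aborts the whole query.
    let ids: Vec<i64> = db
        .prepare_cached("SELECT id FROM notetypes ORDER BY id")?
        .query_and_then([], |row| row.get(0))?
        .collect::<rusqlite::Result<_>>()?;
    assert_eq!(ids, vec![1, 2]);
    Ok(())
}
```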
diff --git a/ts/deck-options/ConfigInput.svelte b/ts/components/ConfigInput.svelte
similarity index 100%
rename from ts/deck-options/ConfigInput.svelte
rename to ts/components/ConfigInput.svelte
diff --git a/ts/deck-options/EnumSelector.svelte b/ts/components/EnumSelector.svelte
similarity index 80%
rename from ts/deck-options/EnumSelector.svelte
rename to ts/components/EnumSelector.svelte
index 503058458..b7e440bc4 100644
--- a/ts/deck-options/EnumSelector.svelte
+++ b/ts/components/EnumSelector.svelte
@@ -3,8 +3,8 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
+ +
+ {error.message} +
    + + diff --git a/ts/deck-options/HelpModal.svelte b/ts/components/HelpModal.svelte similarity index 92% rename from ts/deck-options/HelpModal.svelte rename to ts/components/HelpModal.svelte index 5d79258d7..ca5de06ab 100644 --- a/ts/deck-options/HelpModal.svelte +++ b/ts/components/HelpModal.svelte @@ -8,19 +8,19 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import Modal from "bootstrap/js/dist/modal"; import { createEventDispatcher, getContext, onMount } from "svelte"; - import Badge from "../components/Badge.svelte"; - import Col from "../components/Col.svelte"; - import { modalsKey } from "../components/context-keys"; - import Row from "../components/Row.svelte"; import { pageTheme } from "../sveltelib/theme"; + import Badge from "./Badge.svelte"; + import Col from "./Col.svelte"; + import { modalsKey } from "./context-keys"; import HelpSection from "./HelpSection.svelte"; import { infoCircle, manualIcon } from "./icons"; - import type { DeckOption } from "./types"; + import Row from "./Row.svelte"; + import type { HelpItem } from "./types"; export let title: string; export let url: string; export let startIndex = 0; - export let helpSections: DeckOption[]; + export let helpSections: HelpItem[]; export const modalKey: string = Math.random().toString(36).substring(2); @@ -117,12 +117,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html bind:this={carouselRef} > diff --git a/ts/deck-options/HelpSection.svelte b/ts/components/HelpSection.svelte similarity index 64% rename from ts/deck-options/HelpSection.svelte rename to ts/components/HelpSection.svelte index d5768a969..02d3578e3 100644 --- a/ts/deck-options/HelpSection.svelte +++ b/ts/components/HelpSection.svelte @@ -6,22 +6,22 @@ import * as tr from "@tslib/ftl"; import { renderMarkdown } from "@tslib/helpers"; - import Row from "../components/Row.svelte"; - import type { DeckOption } from "./types"; + import Row from "./Row.svelte"; + import type { HelpItem } from "./types"; - export let section: DeckOption; + export let item: HelpItem;

    - {#if section.url} - {@html section.title} + {#if item.url} + {@html item.title} {:else} - {@html section.title} + {@html item.title} {/if}

    - {#if section.help} - {@html renderMarkdown(section.help)} + {#if item.help} + {@html renderMarkdown(item.help)} {:else} {@html renderMarkdown( tr.helpNoExplanation({ @@ -30,14 +30,14 @@ )} {/if}
    -{#if section.url} +{#if item.url}
    {@html renderMarkdown( tr.helpForMoreInfo({ - link: `${section.title}`, + link: `${item.title}`, }), )}
    diff --git a/ts/deck-options/Label.svelte b/ts/components/Label.svelte similarity index 100% rename from ts/deck-options/Label.svelte rename to ts/components/Label.svelte diff --git a/ts/deck-options/RevertButton.svelte b/ts/components/RevertButton.svelte similarity index 88% rename from ts/deck-options/RevertButton.svelte rename to ts/components/RevertButton.svelte index b15131821..fdd470f1c 100644 --- a/ts/deck-options/RevertButton.svelte +++ b/ts/components/RevertButton.svelte @@ -7,12 +7,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import { cloneDeep, isEqual as isEqualLodash } from "lodash-es"; import { getContext } from "svelte"; - import Badge from "../components/Badge.svelte"; - import { touchDeviceKey } from "../components/context-keys"; - import DropdownItem from "../components/DropdownItem.svelte"; - import Popover from "../components/Popover.svelte"; - import WithFloating from "../components/WithFloating.svelte"; + import Badge from "./Badge.svelte"; + import { touchDeviceKey } from "./context-keys"; + import DropdownItem from "./DropdownItem.svelte"; import { revertIcon } from "./icons"; + import Popover from "./Popover.svelte"; + import WithFloating from "./WithFloating.svelte"; type T = unknown; diff --git a/ts/deck-options/SettingTitle.svelte b/ts/components/SettingTitle.svelte similarity index 100% rename from ts/deck-options/SettingTitle.svelte rename to ts/components/SettingTitle.svelte diff --git a/ts/import-csv/StickyHeader.svelte b/ts/components/StickyHeader.svelte similarity index 91% rename from ts/import-csv/StickyHeader.svelte rename to ts/components/StickyHeader.svelte index a043ae35f..57dbde768 100644 --- a/ts/import-csv/StickyHeader.svelte +++ b/ts/components/StickyHeader.svelte @@ -6,8 +6,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import * as tr from "@tslib/ftl"; import { getPlatformString } from "@tslib/shortcuts"; - import LabelButton from "../components/LabelButton.svelte"; - import Shortcut from "../components/Shortcut.svelte"; + import LabelButton from "./LabelButton.svelte"; + import Shortcut from "./Shortcut.svelte"; export let path: string; export let onImport: () => void; diff --git a/ts/deck-options/SwitchRow.svelte b/ts/components/SwitchRow.svelte similarity index 83% rename from ts/deck-options/SwitchRow.svelte rename to ts/components/SwitchRow.svelte index 56a6f7181..dbfbd0895 100644 --- a/ts/deck-options/SwitchRow.svelte +++ b/ts/components/SwitchRow.svelte @@ -3,12 +3,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html --> + +

    + {heading} +

    + + diff --git a/ts/import-anki-package/ImportAnkiPackagePage.svelte b/ts/import-anki-package/ImportAnkiPackagePage.svelte new file mode 100644 index 000000000..c08d26009 --- /dev/null +++ b/ts/import-anki-package/ImportAnkiPackagePage.svelte @@ -0,0 +1,169 @@ + + + +{#if importing} + +{:else if importResponse} + +{:else} + (importing = true)} /> + + + + + { + modal = e.detail.modal; + carousel = e.detail.carousel; + }} + /> + + + + openHelpModal( + Object.keys(settings).indexOf("mergeNotetypes"), + )} + > + {settings.mergeNotetypes.title} + + + + + + openHelpModal(Object.keys(settings).indexOf("updateNotes"))} + > + {settings.updateNotes.title} + + + + + + openHelpModal( + Object.keys(settings).indexOf("updateNotetypes"), + )} + > + {settings.updateNotetypes.title} + + + + + + openHelpModal( + Object.keys(settings).indexOf("withScheduling"), + )} + > + {settings.withScheduling.title} + + + + + +{/if} + + diff --git a/ts/import-anki-package/import-anki-package-base.scss b/ts/import-anki-package/import-anki-package-base.scss new file mode 100644 index 000000000..7c6cf484b --- /dev/null +++ b/ts/import-anki-package/import-anki-package-base.scss @@ -0,0 +1,28 @@ +@use "sass/bootstrap-dark"; + +@import "sass/base"; + +@import "bootstrap/scss/alert"; +@import "bootstrap/scss/buttons"; +@import "bootstrap/scss/button-group"; +@import "bootstrap/scss/close"; +@import "bootstrap/scss/grid"; +@import "bootstrap/scss/transitions"; +@import "bootstrap/scss/modal"; +@import "bootstrap/scss/carousel"; +@import "sass/bootstrap-forms"; + +.night-mode { + @include bootstrap-dark.night-mode; +} + +body { + min-height: 100vh; + width: min(100vw, 70em); + margin: 0 auto; + padding: 0 1em 1em 1em; +} + +html { + height: initial; +} diff --git a/ts/import-anki-package/index.ts b/ts/import-anki-package/index.ts new file mode 100644 index 000000000..9696ec053 --- /dev/null +++ b/ts/import-anki-package/index.ts @@ -0,0 +1,51 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +import "./import-anki-package-base.scss"; + +import { getImportAnkiPackagePresets } from "@tslib/backend"; +import { ModuleName, setupI18n } from "@tslib/i18n"; +import { checkNightMode } from "@tslib/nightmode"; + +import { modalsKey } from "../components/context-keys"; +import ImportAnkiPackagePage from "./ImportAnkiPackagePage.svelte"; + +const i18n = setupI18n({ + modules: [ + ModuleName.IMPORTING, + ModuleName.ACTIONS, + ModuleName.HELP, + ModuleName.DECK_CONFIG, + ModuleName.ADDING, + ModuleName.EDITING, + ModuleName.KEYBOARD, + ], +}); + +export async function setupImportAnkiPackagePage( + path: string, +): Promise { + const [_, options] = await Promise.all([ + i18n, + getImportAnkiPackagePresets({}), + ]); + + const context = new Map(); + context.set(modalsKey, new Map()); + checkNightMode(); + + return new ImportAnkiPackagePage({ + target: document.body, + props: { + path, + options, + }, + context, + }); +} + +// eg http://localhost:40000/_anki/pages/import-anki-package.html#test-/home/dae/foo.apkg +if (window.location.hash.startsWith("#test-")) { + const apkgPath = window.location.hash.replace("#test-", ""); + setupImportAnkiPackagePage(apkgPath); +} diff --git a/ts/import-anki-package/tsconfig.json b/ts/import-anki-package/tsconfig.json new file mode 100644 index 000000000..276e90989 --- /dev/null +++ b/ts/import-anki-package/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../tsconfig.json", + "include": ["*"], + "references": [ + { "path": 
"../lib" }, + { "path": "../sveltelib" }, + { "path": "../components" } + ], + "compilerOptions": { + "types": ["jest"] + } +} diff --git a/ts/import-csv/ImportCsvPage.svelte b/ts/import-csv/ImportCsvPage.svelte index 172566a86..76e035681 100644 --- a/ts/import-csv/ImportCsvPage.svelte +++ b/ts/import-csv/ImportCsvPage.svelte @@ -21,6 +21,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import Container from "../components/Container.svelte"; import Row from "../components/Row.svelte"; import Spacer from "../components/Spacer.svelte"; + import StickyHeader from "../components/StickyHeader.svelte"; import ImportLogPage from "../import-log/ImportLogPage.svelte"; import DeckDupeCheckSwitch from "./DeckDupeCheckSwitch.svelte"; import DeckSelector from "./DeckSelector.svelte"; @@ -38,7 +39,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } from "./lib"; import NotetypeSelector from "./NotetypeSelector.svelte"; import Preview from "./Preview.svelte"; - import StickyHeader from "./StickyHeader.svelte"; import Tags from "./Tags.svelte"; export let path: string; diff --git a/ts/import-csv/index.ts b/ts/import-csv/index.ts index 00b01b0f7..b16a55178 100644 --- a/ts/import-csv/index.ts +++ b/ts/import-csv/index.ts @@ -7,6 +7,7 @@ import { getCsvMetadata, getDeckNames, getNotetypeNames } from "@tslib/backend"; import { ModuleName, setupI18n } from "@tslib/i18n"; import { checkNightMode } from "@tslib/nightmode"; +import ErrorPage from "../components/ErrorPage.svelte"; import ImportCsvPage from "./ImportCsvPage.svelte"; import { tryGetDeckColumn, tryGetDeckId, tryGetGlobalNotetype, tryGetNotetypeColumn } from "./lib"; @@ -24,44 +25,45 @@ const i18n = setupI18n({ ], }); -export async function setupImportCsvPage(path: string): Promise { - const [notetypes, decks, metadata, _i18n] = await Promise.all([ +export async function setupImportCsvPage(path: string): Promise { + checkNightMode(); + return Promise.all([ getNotetypeNames({}), getDeckNames({ skipEmptyDefault: false, includeFiltered: false, }), - getCsvMetadata({ path }), + getCsvMetadata({ path }, { alertOnError: false }), i18n, - ]); - - checkNightMode(); - - return new ImportCsvPage({ - target: document.body, - props: { - path: path, - deckNameIds: decks.entries, - notetypeNameIds: notetypes.entries, - dupeResolution: metadata.dupeResolution, - matchScope: metadata.matchScope, - delimiter: metadata.delimiter, - forceDelimiter: metadata.forceDelimiter, - isHtml: metadata.isHtml, - forceIsHtml: metadata.forceIsHtml, - globalTags: metadata.globalTags, - updatedTags: metadata.updatedTags, - columnLabels: metadata.columnLabels, - tagsColumn: metadata.tagsColumn, - guidColumn: metadata.guidColumn, - preview: metadata.preview, - globalNotetype: tryGetGlobalNotetype(metadata), - // Unset oneof numbers default to 0, which also means n/a here, - // but it's vital to differentiate between unset and 0 when reserializing. 
- notetypeColumn: tryGetNotetypeColumn(metadata), - deckId: tryGetDeckId(metadata), - deckColumn: tryGetDeckColumn(metadata), - }, + ]).then(([notetypes, decks, metadata, _i18n]) => { + return new ImportCsvPage({ + target: document.body, + props: { + path: path, + deckNameIds: decks.entries, + notetypeNameIds: notetypes.entries, + dupeResolution: metadata.dupeResolution, + matchScope: metadata.matchScope, + delimiter: metadata.delimiter, + forceDelimiter: metadata.forceDelimiter, + isHtml: metadata.isHtml, + forceIsHtml: metadata.forceIsHtml, + globalTags: metadata.globalTags, + updatedTags: metadata.updatedTags, + columnLabels: metadata.columnLabels, + tagsColumn: metadata.tagsColumn, + guidColumn: metadata.guidColumn, + preview: metadata.preview, + globalNotetype: tryGetGlobalNotetype(metadata), + // Unset oneof numbers default to 0, which also means n/a here, + // but it's vital to differentiate between unset and 0 when reserializing. + notetypeColumn: tryGetNotetypeColumn(metadata), + deckId: tryGetDeckId(metadata), + deckColumn: tryGetDeckColumn(metadata), + }, + }); + }).catch((error) => { + return new ErrorPage({ target: document.body, props: { error } }); }); } diff --git a/ts/import-log/ImportLogPage.svelte b/ts/import-log/ImportLogPage.svelte index 78162726b..92e1a1d1c 100644 --- a/ts/import-log/ImportLogPage.svelte +++ b/ts/import-log/ImportLogPage.svelte @@ -4,12 +4,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -->