diff --git a/.vscode.dist/launch.json b/.vscode.dist/launch.json
index dd1a9dcfd..34028b852 100644
--- a/.vscode.dist/launch.json
+++ b/.vscode.dist/launch.json
@@ -18,7 +18,9 @@
"env": {
"PYTHONWARNINGS": "default",
"PYTHONPYCACHEPREFIX": "out/pycache",
- "ANKIDEV": "1"
+ "ANKIDEV": "1",
+ "QTWEBENGINE_REMOTE_DEBUGGING": "8080",
+ "QTWEBENGINE_CHROMIUM_FLAGS": "--remote-allow-origins=http://localhost:8080"
},
"justMyCode": true,
"preLaunchTask": "ninja"
diff --git a/ftl/core/exporting.ftl b/ftl/core/exporting.ftl
index 5bab81296..be612f7da 100644
--- a/ftl/core/exporting.ftl
+++ b/ftl/core/exporting.ftl
@@ -13,6 +13,7 @@ exporting-include = Include:
exporting-include-html-and-media-references = Include HTML and media references
exporting-include-media = Include media
exporting-include-scheduling-information = Include scheduling information
+exporting-include-deck-configs = Include deck presets
exporting-include-tags = Include tags
exporting-support-older-anki-versions = Support older Anki versions (slower/larger files)
exporting-notes-in-plain-text = Notes in Plain Text
diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl
index 05659bfed..690e3ca57 100644
--- a/ftl/core/importing.ftl
+++ b/ftl/core/importing.ftl
@@ -50,11 +50,15 @@ importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copi
importing-notes-skipped-update-due-to-notetype = Notes not updated, as notetype has been modified since you first imported the notes: { $val }
importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val }
importing-include-reviews = Include reviews
-importing-also-import-progress = Also import any learning progress
+importing-also-import-progress = Import any learning progress
+importing-with-deck-configs = Import any deck presets
importing-updates = Updates
importing-include-reviews-help =
If enabled, any previous reviews that the deck sharer included will also be imported.
Otherwise, all cards will be imported as new cards.
+importing-with-deck-configs-help =
+ If enabled, any deck options that the deck sharer included will also be imported.
+ Otherwise, all decks will be assigned the default preset.
importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
# the '|' character
diff --git a/proto/anki/import_export.proto b/proto/anki/import_export.proto
index 0ecfc7b3b..88a7ad163 100644
--- a/proto/anki/import_export.proto
+++ b/proto/anki/import_export.proto
@@ -58,6 +58,7 @@ message ImportAnkiPackageOptions {
ImportAnkiPackageUpdateCondition update_notes = 2;
ImportAnkiPackageUpdateCondition update_notetypes = 3;
bool with_scheduling = 4;
+ bool with_deck_configs = 5;
}
message ImportAnkiPackageRequest {
@@ -88,10 +89,15 @@ message ImportResponse {
message ExportAnkiPackageRequest {
string out_path = 1;
- bool with_scheduling = 2;
+ ExportAnkiPackageOptions options = 2;
+ ExportLimit limit = 3;
+}
+
+message ExportAnkiPackageOptions {
+ bool with_scheduling = 1;
+ bool with_deck_configs = 2;
bool with_media = 3;
bool legacy = 4;
- ExportLimit limit = 5;
}
message PackageMetadata {
diff --git a/pylib/anki/collection.py b/pylib/anki/collection.py
index c1993130f..29a0bdc4c 100644
--- a/pylib/anki/collection.py
+++ b/pylib/anki/collection.py
@@ -42,6 +42,7 @@ BrowserColumns = search_pb2.BrowserColumns
StripHtmlMode = card_rendering_pb2.StripHtmlRequest
ImportLogWithChanges = import_export_pb2.ImportResponse
ImportAnkiPackageRequest = import_export_pb2.ImportAnkiPackageRequest
+ExportAnkiPackageOptions = import_export_pb2.ExportAnkiPackageOptions
ImportCsvRequest = import_export_pb2.ImportCsvRequest
CsvMetadata = import_export_pb2.CsvMetadata
DupeResolution = CsvMetadata.DupeResolution
@@ -361,19 +362,11 @@ class Collection(DeprecatedNamesMixin):
return ImportLogWithChanges.FromString(log)
def export_anki_package(
- self,
- *,
- out_path: str,
- limit: ExportLimit,
- with_scheduling: bool,
- with_media: bool,
- legacy_support: bool,
+ self, *, out_path: str, options: ExportAnkiPackageOptions, limit: ExportLimit
) -> int:
return self._backend.export_anki_package(
out_path=out_path,
- with_scheduling=with_scheduling,
- with_media=with_media,
- legacy=legacy_support,
+ options=options,
limit=pb_export_limit(limit),
)
diff --git a/qt/aqt/forms/exporting.ui b/qt/aqt/forms/exporting.ui
index cb150f6e7..ca627f90f 100644
--- a/qt/aqt/forms/exporting.ui
+++ b/qt/aqt/forms/exporting.ui
@@ -67,6 +67,16 @@
+ -
+
+
+ exporting_include_deck_configs
+
+
+ false
+
+
+
-
@@ -162,9 +172,14 @@
format
deck
includeSched
+ include_deck_configs
includeMedia
+ includeHTML
includeTags
- buttonBox
+ includeDeck
+ includeNotetype
+ includeGuid
+ legacy_support
diff --git a/qt/aqt/import_export/exporting.py b/qt/aqt/import_export/exporting.py
index 529856061..f06d7a47a 100644
--- a/qt/aqt/import_export/exporting.py
+++ b/qt/aqt/import_export/exporting.py
@@ -12,7 +12,13 @@ from typing import Optional, Sequence, Type
import aqt.forms
import aqt.main
-from anki.collection import DeckIdLimit, ExportLimit, NoteIdsLimit, Progress
+from anki.collection import (
+ DeckIdLimit,
+ ExportAnkiPackageOptions,
+ ExportLimit,
+ NoteIdsLimit,
+ Progress,
+)
from anki.decks import DeckId, DeckNameId
from anki.notes import NoteId
from aqt import gui_hooks
@@ -90,6 +96,9 @@ class ExportDialog(QDialog):
def exporter_changed(self, idx: int) -> None:
self.exporter = self.exporter_classes[idx]()
self.frm.includeSched.setVisible(self.exporter.show_include_scheduling)
+ self.frm.include_deck_configs.setVisible(
+ self.exporter.show_include_deck_configs
+ )
self.frm.includeMedia.setVisible(self.exporter.show_include_media)
self.frm.includeTags.setVisible(self.exporter.show_include_tags)
self.frm.includeHTML.setVisible(self.exporter.show_include_html)
@@ -137,6 +146,7 @@ class ExportDialog(QDialog):
return ExportOptions(
out_path=out_path,
include_scheduling=self.frm.includeSched.isChecked(),
+ include_deck_configs=self.frm.include_deck_configs.isChecked(),
include_media=self.frm.includeMedia.isChecked(),
include_tags=self.frm.includeTags.isChecked(),
include_html=self.frm.includeHTML.isChecked(),
@@ -170,6 +180,7 @@ class ExportDialog(QDialog):
class ExportOptions:
out_path: str
include_scheduling: bool
+ include_deck_configs: bool
include_media: bool
include_tags: bool
include_html: bool
@@ -184,6 +195,7 @@ class Exporter(ABC):
extension: str
show_deck_list = False
show_include_scheduling = False
+ show_include_deck_configs = False
show_include_media = False
show_include_tags = False
show_include_html = False
@@ -241,6 +253,7 @@ class ApkgExporter(Exporter):
extension = "apkg"
show_deck_list = True
show_include_scheduling = True
+ show_include_deck_configs = True
show_include_media = True
show_legacy_support = True
@@ -260,9 +273,12 @@ class ApkgExporter(Exporter):
op=lambda col: col.export_anki_package(
out_path=options.out_path,
limit=options.limit,
- with_scheduling=options.include_scheduling,
- with_media=options.include_media,
- legacy_support=options.legacy_support,
+ options=ExportAnkiPackageOptions(
+ with_scheduling=options.include_scheduling,
+ with_deck_configs=options.include_deck_configs,
+ with_media=options.include_media,
+ legacy=options.legacy_support,
+ ),
),
success=on_success,
).with_backend_progress(export_progress_update).run_in_background()
diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs
index 3d12e4e0e..6bdf3143c 100644
--- a/rslib/src/config/bool.rs
+++ b/rslib/src/config/bool.rs
@@ -36,6 +36,7 @@ pub enum BoolKey {
ShiftPositionOfExistingCards,
MergeNotetypes,
WithScheduling,
+ WithDeckConfigs,
Fsrs,
#[strum(to_string = "normalize_note_text")]
NormalizeNoteText,
diff --git a/rslib/src/import_export/gather.rs b/rslib/src/import_export/gather.rs
index 5b3c15304..99e4babe2 100644
--- a/rslib/src/import_export/gather.rs
+++ b/rslib/src/import_export/gather.rs
@@ -9,6 +9,7 @@ use itertools::Itertools;
use super::ExportProgress;
use crate::decks::immediate_parent_name;
+use crate::decks::NormalDeck;
use crate::latex::extract_latex;
use crate::prelude::*;
use crate::progress::ThrottlingProgressHandler;
@@ -36,6 +37,7 @@ impl ExchangeData {
col: &mut Collection,
search: impl TryIntoSearch,
with_scheduling: bool,
+ with_deck_configs: bool,
) -> Result<()> {
self.days_elapsed = col.timing_today()?.days_elapsed;
self.creation_utc_offset = col.get_creation_utc_offset();
@@ -43,18 +45,26 @@ impl ExchangeData {
self.notes = notes;
let (cards, guard) = guard.col.gather_cards()?;
self.cards = cards;
- self.decks = guard.col.gather_decks(with_scheduling)?;
+ self.decks = guard.col.gather_decks(with_scheduling, !with_scheduling)?;
self.notetypes = guard.col.gather_notetypes()?;
- self.check_ids()?;
+
+ let allow_filtered = self.enables_filtered_decks();
if with_scheduling {
self.revlog = guard.col.gather_revlog()?;
- self.deck_configs = guard.col.gather_deck_configs(&self.decks)?;
+ if !allow_filtered {
+ self.restore_cards_from_filtered_decks();
+ }
} else {
- self.remove_scheduling_information(guard.col);
+ self.reset_cards_and_notes(guard.col);
};
- Ok(())
+ if with_deck_configs {
+ self.deck_configs = guard.col.gather_deck_configs(&self.decks)?;
+ }
+ self.reset_decks(!with_deck_configs, !with_scheduling, allow_filtered);
+
+ self.check_ids()
}
pub(super) fn gather_media_names(
@@ -78,9 +88,8 @@ impl ExchangeData {
Ok(())
}
- fn remove_scheduling_information(&mut self, col: &Collection) {
+ fn reset_cards_and_notes(&mut self, col: &Collection) {
self.remove_system_tags();
- self.reset_deck_config_ids_and_limits();
self.reset_cards(col);
}
@@ -94,26 +103,73 @@ impl ExchangeData {
}
}
- fn reset_deck_config_ids_and_limits(&mut self) {
+ fn reset_decks(
+ &mut self,
+ reset_config_ids: bool,
+ reset_study_info: bool,
+ allow_filtered: bool,
+ ) {
for deck in self.decks.iter_mut() {
- if let Ok(normal_mut) = deck.normal_mut() {
- normal_mut.config_id = 1;
- normal_mut.review_limit = None;
- normal_mut.review_limit_today = None;
- normal_mut.new_limit = None;
- normal_mut.new_limit_today = None;
- } else {
- // filtered decks are reset at import time for legacy reasons
+ if reset_study_info {
+ deck.common = Default::default();
+ }
+ match &mut deck.kind {
+ DeckKind::Normal(normal) => {
+ if reset_config_ids {
+ normal.config_id = 1;
+ }
+ if reset_study_info {
+ normal.extend_new = 0;
+ normal.extend_review = 0;
+ normal.review_limit = None;
+ normal.review_limit_today = None;
+ normal.new_limit = None;
+ normal.new_limit_today = None;
+ }
+ }
+ DeckKind::Filtered(_) if reset_study_info || !allow_filtered => {
+ deck.kind = DeckKind::Normal(NormalDeck {
+ config_id: 1,
+ ..Default::default()
+ })
+ }
+ DeckKind::Filtered(_) => (),
}
}
}
+ /// Because the legacy exporter relied on the importer handling filtered
+ /// decks by converting them into regular ones, there are two scenarios to
+ /// watch out for:
+ /// 1. If exported without scheduling, cards have been reset, but their deck
+ /// ids may point to filtered decks.
+ /// 2. If exported with scheduling, cards have not been reset, but their
+ /// original deck ids may point to missing decks.
+ fn enables_filtered_decks(&self) -> bool {
+ self.cards
+ .iter()
+ .all(|c| self.card_and_its_deck_are_normal(c) || self.original_deck_exists(c))
+ }
+
+ fn card_and_its_deck_are_normal(&self, card: &Card) -> bool {
+ card.original_deck_id.0 == 0
+ && self
+ .decks
+ .iter()
+ .find(|d| d.id == card.deck_id)
+ .map(|d| !d.is_filtered())
+ .unwrap_or_default()
+ }
+
+ fn original_deck_exists(&self, card: &Card) -> bool {
+ card.original_deck_id.0 == 1 || self.decks.iter().any(|d| d.id == card.original_deck_id)
+ }
+
fn reset_cards(&mut self, col: &Collection) {
let mut position = col.get_next_card_position();
for card in self.cards.iter_mut() {
// schedule_as_new() removes cards from filtered decks, but we want to
- // leave cards in their current deck, which gets converted to a regular
- // deck on import
+ // leave cards in their current deck, which gets converted to a regular one
let deck_id = card.deck_id;
if card.schedule_as_new(position, true, true) {
position += 1;
@@ -123,6 +179,16 @@ impl ExchangeData {
}
}
+ fn restore_cards_from_filtered_decks(&mut self) {
+ for card in self.cards.iter_mut() {
+ if card.is_filtered() {
+ // instead of moving between decks, the deck is converted to a regular one
+ card.original_deck_id = card.deck_id;
+ card.remove_from_filtered_deck_restoring_queue();
+ }
+ }
+ }
+
fn check_ids(&self) -> Result<()> {
let tomorrow = TimestampMillis::now().adding_secs(86_400).0;
if self
@@ -183,12 +249,12 @@ impl Collection {
.map(|cards| (cards, guard))
}
- /// If with_scheduling, also gather all original decks of cards in filtered
+ /// If with_original, also gather all original decks of cards in filtered
/// decks, so they don't have to be converted to regular decks on import.
- /// If not with_scheduling, skip exporting the default deck to avoid
+ /// If skip_default, skip exporting the default deck to avoid
/// changing the importing client's defaults.
- fn gather_decks(&mut self, with_scheduling: bool) -> Result<Vec<Deck>> {
- let decks = if with_scheduling {
+ fn gather_decks(&mut self, with_original: bool, skip_default: bool) -> Result<Vec<Deck>> {
+ let decks = if with_original {
self.storage.get_decks_and_original_for_search_cards()
} else {
self.storage.get_decks_for_search_cards()
@@ -197,7 +263,7 @@ impl Collection {
Ok(decks
.into_iter()
.chain(parents)
- .filter(|deck| with_scheduling || deck.id != DeckId(1))
+ .filter(|deck| !(skip_default && deck.id.0 == 1))
.collect())
}
@@ -263,7 +329,7 @@ mod test {
let mut col = Collection::new();
let note = NoteAdder::basic(&mut col).add(&mut col);
- data.gather_data(&mut col, SearchNode::WholeCollection, true)
+ data.gather_data(&mut col, SearchNode::WholeCollection, true, true)
.unwrap();
assert_eq!(data.notes, [note]);
@@ -280,7 +346,7 @@ mod test {
col.add_note_only_with_id_undoable(&mut note).unwrap();
assert!(data
- .gather_data(&mut col, SearchNode::WholeCollection, true)
+ .gather_data(&mut col, SearchNode::WholeCollection, true, true)
.is_err());
}
}
diff --git a/rslib/src/import_export/package/apkg/export.rs b/rslib/src/import_export/package/apkg/export.rs
index 6bf16f0a8..eeb60a369 100644
--- a/rslib/src/import_export/package/apkg/export.rs
+++ b/rslib/src/import_export/package/apkg/export.rs
@@ -14,6 +14,7 @@ use crate::collection::CollectionBuilder;
use crate::import_export::gather::ExchangeData;
use crate::import_export::package::colpkg::export::export_collection;
use crate::import_export::package::media::MediaIter;
+use crate::import_export::package::ExportAnkiPackageOptions;
use crate::import_export::package::Meta;
use crate::import_export::ExportProgress;
use crate::prelude::*;
@@ -21,14 +22,11 @@ use crate::progress::ThrottlingProgressHandler;
impl Collection {
/// Returns number of exported notes.
- #[allow(clippy::too_many_arguments)]
pub fn export_apkg(
&mut self,
out_path: impl AsRef<Path>,
+ options: ExportAnkiPackageOptions,
search: impl TryIntoSearch,
- with_scheduling: bool,
- with_media: bool,
- legacy: bool,
media_fn: Option<Box<dyn FnOnce(HashSet<String>) -> MediaIter>>,
) -> Result<usize> {
let mut progress = self.new_progress_handler();
@@ -38,19 +36,13 @@ impl Collection {
.path()
.to_str()
.or_invalid("non-unicode filename")?;
- let meta = if legacy {
+ let meta = if options.legacy {
Meta::new_legacy()
} else {
Meta::new()
};
- let data = self.export_into_collection_file(
- &meta,
- temp_col_path,
- search,
- &mut progress,
- with_scheduling,
- with_media,
- )?;
+ let data =
+ self.export_into_collection_file(&meta, temp_col_path, options, search, &mut progress)?;
progress.set(ExportProgress::File)?;
let media = if let Some(media_fn) = media_fn {
@@ -77,15 +69,19 @@ impl Collection {
&mut self,
meta: &Meta,
path: &str,
+ options: ExportAnkiPackageOptions,
search: impl TryIntoSearch,
progress: &mut ThrottlingProgressHandler<ExportProgress>,
- with_scheduling: bool,
- with_media: bool,
) -> Result<ExchangeData> {
let mut data = ExchangeData::default();
progress.set(ExportProgress::Gathering)?;
- data.gather_data(self, search, with_scheduling)?;
- if with_media {
+ data.gather_data(
+ self,
+ search,
+ options.with_scheduling,
+ options.with_deck_configs,
+ )?;
+ if options.with_media {
data.gather_media_names(progress)?;
}
diff --git a/rslib/src/import_export/package/apkg/import/cards.rs b/rslib/src/import_export/package/apkg/import/cards.rs
index e04bbaf23..e91695e14 100644
--- a/rslib/src/import_export/package/apkg/import/cards.rs
+++ b/rslib/src/import_export/package/apkg/import/cards.rs
@@ -78,7 +78,6 @@ impl Context<'_> {
notetype_map: &HashMap,
remapped_templates: &HashMap,
imported_decks: &HashMap,
- keep_filtered: bool,
) -> Result<()> {
let mut ctx = CardContext::new(
self.usn,
@@ -92,16 +91,16 @@ impl Context<'_> {
if ctx.scheduler_version == SchedulerVersion::V1 {
return Err(AnkiError::SchedulerUpgradeRequired);
}
- ctx.import_cards(mem::take(&mut self.data.cards), keep_filtered)?;
+ ctx.import_cards(mem::take(&mut self.data.cards))?;
ctx.import_revlog(mem::take(&mut self.data.revlog))
}
}
impl CardContext<'_> {
- fn import_cards(&mut self, mut cards: Vec<Card>, keep_filtered: bool) -> Result<()> {
+ fn import_cards(&mut self, mut cards: Vec<Card>) -> Result<()> {
for card in &mut cards {
if self.map_to_imported_note(card) && !self.card_ordinal_already_exists(card) {
- self.add_card(card, keep_filtered)?;
+ self.add_card(card)?;
}
// TODO: could update existing card
}
@@ -133,14 +132,11 @@ impl CardContext<'_> {
.contains(&(card.note_id, card.template_idx))
}
- fn add_card(&mut self, card: &mut Card, keep_filtered: bool) -> Result<()> {
+ fn add_card(&mut self, card: &mut Card) -> Result<()> {
card.usn = self.usn;
self.remap_deck_ids(card);
self.remap_template_index(card);
card.shift_collection_relative_dates(self.collection_delta);
- if !keep_filtered {
- card.maybe_remove_from_filtered_deck();
- }
let old_id = self.uniquify_card_id(card);
self.target_col.add_card_if_unique_undoable(card)?;
@@ -198,12 +194,4 @@ impl Card {
fn original_due_in_days_since_collection_creation(&self) -> bool {
self.ctype == CardType::Review
}
-
- fn maybe_remove_from_filtered_deck(&mut self) {
- if self.is_filtered() {
- // instead of moving between decks, the deck is converted to a regular one
- self.original_deck_id = self.deck_id;
- self.remove_from_filtered_deck_restoring_queue();
- }
- }
}
diff --git a/rslib/src/import_export/package/apkg/import/decks.rs b/rslib/src/import_export/package/apkg/import/decks.rs
index e0ee3b439..400554095 100644
--- a/rslib/src/import_export/package/apkg/import/decks.rs
+++ b/rslib/src/import_export/package/apkg/import/decks.rs
@@ -29,18 +29,10 @@ impl<'d> DeckContext<'d> {
}
impl Context<'_> {
- pub(super) fn import_decks_and_configs(
- &mut self,
- keep_filtered: bool,
- contains_scheduling: bool,
- ) -> Result<HashMap<DeckId, DeckId>> {
+ pub(super) fn import_decks_and_configs(&mut self) -> Result<HashMap<DeckId, DeckId>> {
let mut ctx = DeckContext::new(self.target_col, self.usn);
ctx.import_deck_configs(mem::take(&mut self.data.deck_configs))?;
- ctx.import_decks(
- mem::take(&mut self.data.decks),
- keep_filtered,
- contains_scheduling,
- )?;
+ ctx.import_decks(mem::take(&mut self.data.decks))?;
Ok(ctx.imported_decks)
}
}
@@ -54,42 +46,16 @@ impl DeckContext<'_> {
Ok(())
}
- fn import_decks(
- &mut self,
- mut decks: Vec<Deck>,
- keep_filtered: bool,
- contains_scheduling: bool,
- ) -> Result<()> {
+ fn import_decks(&mut self, mut decks: Vec<Deck>) -> Result<()> {
// ensure parents are seen before children
decks.sort_unstable_by_key(|deck| deck.level());
for deck in &mut decks {
- self.prepare_deck(deck, keep_filtered, contains_scheduling);
+ self.maybe_reparent(deck);
self.import_deck(deck)?;
}
Ok(())
}
- fn prepare_deck(&self, deck: &mut Deck, keep_filtered: bool, contains_scheduling: bool) {
- self.maybe_reparent(deck);
- if !keep_filtered && deck.is_filtered() {
- deck.kind = DeckKind::Normal(NormalDeck {
- config_id: 1,
- ..Default::default()
- });
- } else if !contains_scheduling {
- // reset things like today's study count and collapse state
- deck.common = Default::default();
- deck.kind = match &mut deck.kind {
- DeckKind::Normal(normal) => DeckKind::Normal(NormalDeck {
- config_id: 1,
- description: mem::take(&mut normal.description),
- ..Default::default()
- }),
- DeckKind::Filtered(_) => unreachable!(),
- }
- }
- }
-
fn import_deck(&mut self, deck: &mut Deck) -> Result<()> {
if let Some(original) = self.get_deck_by_name(deck)? {
if original.is_same_kind(deck) {
@@ -225,7 +191,7 @@ mod test {
DeckAdder::new("NEW PARENT::child").deck(),
DeckAdder::new("new parent").deck(),
];
- ctx.import_decks(imports, false, false).unwrap();
+ ctx.import_decks(imports).unwrap();
let existing_decks: HashSet<_> = ctx
.target_col
.get_all_deck_names(true)
diff --git a/rslib/src/import_export/package/apkg/import/mod.rs b/rslib/src/import_export/package/apkg/import/mod.rs
index f1093d9c4..9c8e707cc 100644
--- a/rslib/src/import_export/package/apkg/import/mod.rs
+++ b/rslib/src/import_export/package/apkg/import/mod.rs
@@ -6,7 +6,6 @@ mod decks;
mod media;
mod notes;
-use std::collections::HashSet;
use std::fs::File;
use std::path::Path;
@@ -62,6 +61,7 @@ impl Collection {
self.transact(Op::Import, |col| {
col.set_config(BoolKey::MergeNotetypes, &options.merge_notetypes)?;
col.set_config(BoolKey::WithScheduling, &options.with_scheduling)?;
+ col.set_config(BoolKey::WithDeckConfigs, &options.with_deck_configs)?;
col.set_config(ConfigKey::UpdateNotes, &options.update_notes())?;
col.set_config(ConfigKey::UpdateNotetypes, &options.update_notetypes())?;
let mut ctx = Context::new(archive, col, options, progress)?;
@@ -85,6 +85,7 @@ impl<'a> Context<'a> {
SearchNode::WholeCollection,
&mut progress,
options.with_scheduling,
+ options.with_deck_configs,
)?;
let usn = target_col.usn()?;
Ok(Self {
@@ -110,15 +111,12 @@ impl<'a> Context<'a> {
.collect();
let mut media_map = self.prepare_media()?;
let note_imports = self.import_notes_and_notetypes(&mut media_map)?;
- let keep_filtered = self.data.enables_filtered_decks();
- let contains_scheduling = self.data.contains_scheduling();
- let imported_decks = self.import_decks_and_configs(keep_filtered, contains_scheduling)?;
+ let imported_decks = self.import_decks_and_configs()?;
self.import_cards_and_revlog(
¬e_imports.id_map,
¬etypes,
¬e_imports.remapped_templates,
&imported_decks,
- keep_filtered,
)?;
self.copy_media(&mut media_map)?;
Ok(note_imports.log)
@@ -132,6 +130,7 @@ impl ExchangeData {
search: impl TryIntoSearch,
progress: &mut ThrottlingProgressHandler<ImportProgress>,
with_scheduling: bool,
+ with_deck_configs: bool,
) -> Result<Self> {
let tempfile = collection_to_tempfile(meta, archive)?;
let mut col = CollectionBuilder::new(tempfile.path()).build()?;
@@ -140,31 +139,10 @@ impl ExchangeData {
progress.set(ImportProgress::Gathering)?;
let mut data = ExchangeData::default();
- data.gather_data(&mut col, search, with_scheduling)?;
+ data.gather_data(&mut col, search, with_scheduling, with_deck_configs)?;
Ok(data)
}
-
- fn enables_filtered_decks(&self) -> bool {
- // Earlier versions relied on the importer handling filtered decks by converting
- // them into regular ones, so there is no guarantee that all original decks
- // are included. And the legacy exporter included the default deck config, so we
- // can't use it to determine if scheduling is included.
- self.contains_scheduling()
- && self.contains_all_original_decks()
- && !self.deck_configs.is_empty()
- }
-
- fn contains_scheduling(&self) -> bool {
- !self.revlog.is_empty()
- }
-
- fn contains_all_original_decks(&self) -> bool {
- let deck_ids: HashSet<_> = self.decks.iter().map(|d| d.id).collect();
- self.cards
- .iter()
- .all(|c| c.original_deck_id.0 == 0 || deck_ids.contains(&c.original_deck_id))
- }
}
fn collection_to_tempfile(meta: &Meta, archive: &mut ZipArchive<File>) -> Result<NamedTempFile> {
diff --git a/rslib/src/import_export/package/apkg/import/notes.rs b/rslib/src/import_export/package/apkg/import/notes.rs
index 49633fba7..b41237989 100644
--- a/rslib/src/import_export/package/apkg/import/notes.rs
+++ b/rslib/src/import_export/package/apkg/import/notes.rs
@@ -602,6 +602,7 @@ impl Notetype {
#[cfg(test)]
mod test {
+ use anki_proto::import_export::ExportAnkiPackageOptions;
use anki_proto::import_export::ImportAnkiPackageOptions;
use tempfile::TempDir;
@@ -961,7 +962,7 @@ mod test {
.add(&mut src);
let temp_dir = TempDir::new()?;
let path = temp_dir.path().join("foo.apkg");
- src.export_apkg(&path, "", false, false, false, None)?;
+ src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?;
let mut dst = CollectionBuilder::new(temp_dir.path().join("dst.anki2"))
.with_desktop_media_paths()
@@ -980,7 +981,7 @@ mod test {
// importing again with merge disabled will fail for the exisitng note,
// but the new one will be added with an extra notetype
assert_eq!(dst.storage.get_all_notetype_names().unwrap().len(), 7);
- src.export_apkg(&path, "", false, false, false, None)?;
+ src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?;
assert_eq!(
dst.import_apkg(&path, ImportAnkiPackageOptions::default())?
.output
@@ -992,7 +993,7 @@ mod test {
// if enabling merge, it should succeed and remove the empty notetype, remapping
// its note
- src.export_apkg(&path, "", false, false, false, None)?;
+ src.export_apkg(&path, ExportAnkiPackageOptions::default(), "", None)?;
assert_eq!(
dst.import_apkg(
&path,
diff --git a/rslib/src/import_export/package/apkg/tests.rs b/rslib/src/import_export/package/apkg/tests.rs
index 47b322722..9c0e15eca 100644
--- a/rslib/src/import_export/package/apkg/tests.rs
+++ b/rslib/src/import_export/package/apkg/tests.rs
@@ -10,6 +10,7 @@ use std::io::Write;
use anki_io::read_file;
use anki_proto::import_export::ImportAnkiPackageOptions;
+use crate::import_export::package::ExportAnkiPackageOptions;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use crate::prelude::*;
@@ -44,10 +45,13 @@ fn roundtrip_inner(legacy: bool) {
src_col
.export_apkg(
&apkg_path,
+ ExportAnkiPackageOptions {
+ with_scheduling: true,
+ with_deck_configs: true,
+ with_media: true,
+ legacy,
+ },
SearchNode::from_deck_name("parent::sample"),
- true,
- true,
- legacy,
None,
)
.unwrap();
diff --git a/rslib/src/import_export/package/mod.rs b/rslib/src/import_export/package/mod.rs
index b99a38ddf..935fc7e9c 100644
--- a/rslib/src/import_export/package/mod.rs
+++ b/rslib/src/import_export/package/mod.rs
@@ -7,6 +7,7 @@ mod media;
mod meta;
use anki_proto::import_export::media_entries::MediaEntry;
+pub use anki_proto::import_export::ExportAnkiPackageOptions;
pub use anki_proto::import_export::ImportAnkiPackageOptions;
pub use anki_proto::import_export::ImportAnkiPackageUpdateCondition as UpdateCondition;
use anki_proto::import_export::MediaEntries;
diff --git a/rslib/src/import_export/service.rs b/rslib/src/import_export/service.rs
index c51c59906..c324ebf54 100644
--- a/rslib/src/import_export/service.rs
+++ b/rslib/src/import_export/service.rs
@@ -22,6 +22,7 @@ impl crate::services::ImportExportService for Collection {
Ok(anki_proto::import_export::ImportAnkiPackageOptions {
merge_notetypes: self.get_config_bool(BoolKey::MergeNotetypes),
with_scheduling: self.get_config_bool(BoolKey::WithScheduling),
+ with_deck_configs: self.get_config_bool(BoolKey::WithDeckConfigs),
update_notes: self.get_update_notes() as i32,
update_notetypes: self.get_update_notetypes() as i32,
})
@@ -33,10 +34,8 @@ impl crate::services::ImportExportService for Collection {
) -> Result<generic::UInt32> {
self.export_apkg(
&input.out_path,
- SearchNode::from(input.limit.unwrap_or_default()),
- input.with_scheduling,
- input.with_media,
- input.legacy,
+ input.options.unwrap_or_default(),
+ input.limit.unwrap_or_default(),
None,
)
.map(Into::into)
diff --git a/ts/import-anki-package/ImportAnkiPackagePage.svelte b/ts/import-anki-package/ImportAnkiPackagePage.svelte
index b0fc4ff8e..43cbe57b3 100644
--- a/ts/import-anki-package/ImportAnkiPackagePage.svelte
+++ b/ts/import-anki-package/ImportAnkiPackagePage.svelte
@@ -29,6 +29,11 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
help: tr.importingIncludeReviewsHelp(),
url: HelpPage.PackageImporting.scheduling,
},
+ withDeckConfigs: {
+ title: tr.importingWithDeckConfigs(),
+ help: tr.importingWithDeckConfigsHelp(),
+ url: HelpPage.PackageImporting.scheduling,
+ },
mergeNotetypes: {
title: tr.importingMergeNotetypes(),
help: tr.importingMergeNotetypesHelp(),
@@ -84,6 +89,15 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+
+ openHelpModal(Object.keys(settings).indexOf("withDeckConfigs"))}
+ >
+ {settings.withDeckConfigs.title}
+
+
+
{tr.importingUpdates()}