Return note log when importing apkg

This commit is contained in:
RumovZ 2022-04-20 20:48:22 +02:00
parent 6b85b5900f
commit 63ad6624a6
6 changed files with 118 additions and 35 deletions

View file

@@ -16,7 +16,7 @@ service ImportExportService {
rpc ExportCollectionPackage(ExportCollectionPackageRequest)
returns (generic.Empty);
rpc ImportAnkiPackage(ImportAnkiPackageRequest)
returns (collection.OpChanges);
returns (ImportAnkiPackageResponse);
rpc ExportAnkiPackage(ExportAnkiPackageRequest) returns (generic.Empty);
}
@@ -36,6 +36,17 @@ message ImportAnkiPackageRequest {
string package_path = 1;
}
// Result of importing an .apkg file: the op changes to apply to the
// client, plus a log describing what happened to each imported note.
message ImportAnkiPackageResponse {
  // Field contents of the source collection's notes, grouped by what the
  // importer did with each note.
  message Log {
    // Notes newly added to the target collection.
    repeated generic.StringList new = 1;
    // Notes that updated an existing note (source copy was newer,
    // same notetype).
    repeated generic.StringList updated = 2;
    // Notes skipped because the target already had an equal or newer copy.
    repeated generic.StringList duplicate = 3;
    // Notes that could not be imported because the existing target note
    // has a different notetype.
    repeated generic.StringList conflicting = 4;
  }
  collection.OpChanges changes = 1;
  Log log = 2;
}
message ExportAnkiPackageRequest {
string out_path = 1;
bool with_scheduling = 2;

View file

@@ -5,7 +5,10 @@ use super::{progress::Progress, Backend};
pub(super) use crate::backend_proto::importexport_service::Service as ImportExportService;
use crate::{
backend_proto::{self as pb, export_anki_package_request::Selector},
import_export::{package::import_colpkg, ImportProgress},
import_export::{
package::{import_colpkg, NoteLog},
ImportProgress,
},
prelude::*,
search::SearchNode,
};
@@ -45,7 +48,10 @@ impl ImportExportService for Backend {
.map(Into::into)
}
fn import_anki_package(&self, input: pb::ImportAnkiPackageRequest) -> Result<pb::OpChanges> {
fn import_anki_package(
&self,
input: pb::ImportAnkiPackageRequest,
) -> Result<pb::ImportAnkiPackageResponse> {
self.with_col(|col| col.import_apkg(&input.package_path, &mut self.import_progress_fn()))
.map(Into::into)
}
@@ -98,3 +104,12 @@ impl Backend {
}
}
}
impl From<OpOutput<NoteLog>> for pb::ImportAnkiPackageResponse {
    /// Bundle the op's collection changes and the note import log into
    /// the protobuf response returned by the backend service.
    fn from(op: OpOutput<NoteLog>) -> Self {
        let changes = op.changes.into();
        pb::ImportAnkiPackageResponse {
            changes: Some(changes),
            log: Some(op.output),
        }
    }
}

View file

@@ -15,7 +15,11 @@ use zip::ZipArchive;
use crate::{
collection::CollectionBuilder,
import_export::{gather::ExchangeData, package::Meta, ImportProgress},
import_export::{
gather::ExchangeData,
package::{Meta, NoteLog},
ImportProgress,
},
prelude::*,
search::SearchNode,
};
@@ -35,7 +39,7 @@ impl Collection {
&mut self,
path: impl AsRef<Path>,
progress_fn: &mut ProgressFn,
) -> Result<OpOutput<()>> {
) -> Result<OpOutput<NoteLog>> {
let file = File::open(path)?;
let archive = ZipArchive::new(file)?;
@@ -64,12 +68,13 @@ impl<'a> Context<'a> {
})
}
fn import(&mut self) -> Result<()> {
fn import(&mut self) -> Result<NoteLog> {
let mut media_map = self.prepare_media()?;
let imported_notes = self.import_notes_and_notetypes(&mut media_map)?;
let note_imports = self.import_notes_and_notetypes(&mut media_map)?;
let imported_decks = self.import_decks_and_configs()?;
self.import_cards_and_revlog(&imported_notes, &imported_decks)?;
self.copy_media(&mut media_map)
self.import_cards_and_revlog(&note_imports.id_map, &imported_decks)?;
self.copy_media(&mut media_map)?;
Ok(note_imports.log)
}
}

View file

@@ -12,7 +12,9 @@ use sha1::Sha1;
use super::{media::MediaUseMap, Context};
use crate::{
import_export::package::media::safe_normalized_file_name, prelude::*, text::replace_media_refs,
import_export::package::{media::safe_normalized_file_name, NoteLog},
prelude::*,
text::replace_media_refs,
};
struct NoteContext<'a> {
@@ -23,7 +25,36 @@ struct NoteContext<'a> {
target_guids: HashMap<String, NoteMeta>,
target_ids: HashSet<NoteId>,
media_map: &'a mut MediaUseMap,
imported_notes: HashMap<NoteId, NoteId>,
imports: NoteImports,
}
/// Bookkeeping for an apkg note import: maps source note ids to their
/// counterparts in the target collection, and records each note's fields
/// in the log bucket matching its import outcome.
#[derive(Debug, Default)]
pub(super) struct NoteImports {
    pub(super) id_map: HashMap<NoteId, NoteId>,
    /// All notes from the source collection as [Vec]s of their fields, and grouped
    /// by import result kind.
    pub(super) log: NoteLog,
}

impl NoteImports {
    /// Record a note that was newly added to the target collection.
    fn log_new(&mut self, note: Note, source_id: NoteId) {
        let target_id = note.id;
        self.id_map.insert(source_id, target_id);
        self.log.new.push(note.take_fields().into());
    }

    /// Record a note that replaced an existing note in the target collection.
    fn log_updated(&mut self, note: Note, source_id: NoteId) {
        let target_id = note.id;
        self.id_map.insert(source_id, target_id);
        self.log.updated.push(note.take_fields().into());
    }

    /// Record a source note that was skipped because the target already
    /// holds an equal or newer copy; `note.id` is still the source id here.
    fn log_duplicate(&mut self, note: Note, target_id: NoteId) {
        let source_id = note.id;
        self.id_map.insert(source_id, target_id);
        self.log.duplicate.push(note.take_fields().into());
    }

    /// Record a source note that could not be imported (its existing
    /// target counterpart has a different notetype); no id mapping is made.
    fn log_conflicting(&mut self, note: Note) {
        self.log.conflicting.push(note.take_fields().into());
    }
}
#[derive(Debug, Clone, Copy)]
@@ -47,11 +78,11 @@ impl Context<'_> {
pub(super) fn import_notes_and_notetypes(
&mut self,
media_map: &mut MediaUseMap,
) -> Result<HashMap<NoteId, NoteId>> {
) -> Result<NoteImports> {
let mut ctx = NoteContext::new(self.usn, self.target_col, media_map)?;
ctx.import_notetypes(mem::take(&mut self.data.notetypes))?;
ctx.import_notes(mem::take(&mut self.data.notes))?;
Ok(ctx.imported_notes)
Ok(ctx.imports)
}
}
@@ -71,7 +102,7 @@ impl<'n> NoteContext<'n> {
remapped_notetypes: HashMap::new(),
target_guids,
target_ids,
imported_notes: HashMap::new(),
imports: NoteImports::default(),
media_map,
})
}
@@ -126,8 +157,8 @@ impl<'n> NoteContext<'n> {
Ok(())
}
fn import_notes(&mut self, mut notes: Vec<Note>) -> Result<()> {
for note in &mut notes {
fn import_notes(&mut self, notes: Vec<Note>) -> Result<()> {
for mut note in notes {
if let Some(notetype_id) = self.remapped_notetypes.get(&note.notetype_id) {
if self.target_guids.contains_key(&note.guid) {
// TODO: Log ignore
@@ -144,18 +175,17 @@ impl<'n> NoteContext<'n> {
Ok(())
}
fn add_note(&mut self, mut note: &mut Note) -> Result<()> {
// TODO: Log add
self.munge_media(note)?;
self.target_col.canonify_note_tags(note, self.usn)?;
fn add_note(&mut self, mut note: Note) -> Result<()> {
self.munge_media(&mut note)?;
self.target_col.canonify_note_tags(&mut note, self.usn)?;
let notetype = self.get_expected_notetype(note.notetype_id)?;
note.prepare_for_update(&notetype, self.normalize_notes)?;
note.usn = self.usn;
let old_id = self.uniquify_note_id(note);
let old_id = self.uniquify_note_id(&mut note);
self.target_col.add_note_only_with_id_undoable(note)?;
self.target_col.add_note_only_with_id_undoable(&mut note)?;
self.target_ids.insert(note.id);
self.imported_notes.insert(old_id, note.id);
self.imports.log_new(note, old_id);
Ok(())
}
@@ -181,36 +211,36 @@ impl<'n> NoteContext<'n> {
.ok_or(AnkiError::NotFound)
}
fn maybe_update_note(&mut self, note: &mut Note, meta: NoteMeta) -> Result<()> {
fn maybe_update_note(&mut self, note: Note, meta: NoteMeta) -> Result<()> {
if meta.mtime < note.mtime {
if meta.notetype_id == note.notetype_id {
self.imported_notes.insert(note.id, meta.id);
note.id = meta.id;
self.update_note(note)?;
self.update_note(note, meta.id)?;
} else {
// TODO: Log ignore
self.imports.log_conflicting(note);
}
} else {
// TODO: Log duplicate
self.imported_notes.insert(note.id, meta.id);
self.imports.log_duplicate(note, meta.id);
}
Ok(())
}
fn update_note(&mut self, note: &mut Note) -> Result<()> {
// TODO: Log update
self.munge_media(note)?;
fn update_note(&mut self, mut note: Note, target_id: NoteId) -> Result<()> {
let source_id = note.id;
note.id = target_id;
self.munge_media(&mut note)?;
let original = self.get_expected_note(note.id)?;
let notetype = self.get_expected_notetype(note.notetype_id)?;
self.target_col.update_note_inner_without_cards(
note,
&mut note,
&original,
&notetype,
self.usn,
true,
self.normalize_notes,
true,
)
)?;
self.imports.log_updated(note, source_id);
Ok(())
}
fn munge_media(&mut self, note: &mut Note) -> Result<()> {
@@ -309,6 +339,17 @@ mod test {
ctx.remapped_notetypes.insert(NotetypeId(123), basic_ntid);
ctx.import_notes(notes).unwrap();
assert_log(
&ctx.imports.log.new,
&[&["<img src='bar.jpg'>", ""], &["", ""], &["", ""]],
);
assert_log(&ctx.imports.log.duplicate, &[&["outdated", ""]]);
assert_log(
&ctx.imports.log.updated,
&[&["updated", ""], &["updated", ""]],
);
assert_log(&ctx.imports.log.conflicting, &[&["updated", ""]]);
// media is remapped
assert_eq!(
col.get_note_field(note_with_media.id, 0),
@@ -331,6 +372,12 @@ mod test {
assert_eq!(col.get_note_field(updated_note_with_remapped_nt.id, 0), "");
}
/// Assert that `log` contains exactly the entries in `expected`, where
/// each entry is one note's list of fields.
fn assert_log(log: &[crate::backend_proto::StringList], expected: &[&[&str]]) {
    // Compare lengths first: iterating over `log` alone would silently
    // pass when `log` has fewer entries than `expected`.
    assert_eq!(log.len(), expected.len());
    for (entry, expected_fields) in log.iter().zip(expected) {
        assert_eq!(entry.vals, *expected_fields);
    }
}
impl Collection {
fn note_id_for_guid(&self, guid: &str) -> NoteId {
self.storage

View file

@@ -11,4 +11,5 @@ pub(crate) use colpkg::export::export_colpkg_from_data;
pub use colpkg::import::import_colpkg;
pub(self) use meta::{Meta, Version};
pub use crate::backend_proto::import_anki_package_response::Log as NoteLog;
pub(self) use crate::backend_proto::{media_entries::MediaEntry, MediaEntries};

View file

@@ -55,6 +55,10 @@ impl Note {
&self.fields
}
/// Consume the note, returning ownership of its field contents.
pub fn take_fields(self) -> Vec<String> {
    self.fields
}
pub fn set_field(&mut self, idx: usize, text: impl Into<String>) -> Result<()> {
if idx >= self.fields.len() {
return Err(AnkiError::invalid_input(