Add more progress + abort points to export code
The old `media_files_did_export` hook has been kept around for use with the legacy apkg exporter (an add-on uses it), and a new `legacy_export_progress` hook has been added so we can get progress from the new colpkg exporter until we move over fully to the new code.
parent 228b4331c3
commit c8a4e5ea22

12 changed files with 83 additions and 26 deletions
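For add-on authors, a minimal sketch of how the two hooks might be consumed after this change (the hook names and argument types are taken from this commit; the callbacks themselves are purely illustrative):

from anki import hooks

def on_media_exported(count: int) -> None:
    # still fired by the legacy .apkg exporter
    print(f"exported {count} media files so far")

def on_export_progress(progress: str) -> None:
    # already-translated label polled from the backend while the new colpkg exporter runs
    print(progress)

hooks.media_files_did_export.append(on_media_exported)
hooks.legacy_export_progress.append(on_export_progress)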
@@ -32,3 +32,9 @@ exporting-note-exported =
         [one] { $count } note exported.
        *[other] { $count } notes exported.
     }
+exporting-exporting-file = Exporting file...
+exporting-processed-media-files =
+    { $count ->
+        [one] Processed { $count } media file...
+       *[other] Processed { $count } media files...
+    }
@@ -108,6 +108,7 @@ message Progress {
     uint32 stage_total = 2;
     uint32 stage_current = 3;
   }
+
   oneof value {
     generic.Empty none = 1;
     MediaSync media_sync = 2;
@@ -116,7 +117,7 @@ message Progress {
     NormalSync normal_sync = 5;
     DatabaseCheck database_check = 6;
     string importing = 7;
-    uint32 exporting = 8;
+    string exporting = 8;
   }
 }

@@ -435,14 +435,14 @@ class AnkiCollectionPackageExporter(AnkiPackageExporter):
         def exporting_media() -> bool:
             return any(
                 hook.__name__ == "exported_media"
-                for hook in hooks.media_files_did_export._hooks
+                for hook in hooks.legacy_export_progress._hooks
             )

         def progress() -> None:
             while exporting_media():
                 progress = self.col._backend.latest_progress()
                 if progress.HasField("exporting"):
-                    hooks.media_files_did_export(progress.exporting)
+                    hooks.legacy_export_progress(progress.exporting)
                 time.sleep(0.1)

         threading.Thread(target=progress).start()
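Note how the polling thread above decides when to stop: it only keeps running while a callback literally named `exported_media` is registered on `legacy_export_progress`, which is why the GUI-side adaptor added further down must not be renamed. A sketch of that lifecycle (the exporter instance and output path are hypothetical):

from anki import hooks

# the name matters: exporting_media() checks hook.__name__ == "exported_media"
def exported_media(progress: str) -> None:
    print(progress)

hooks.legacy_export_progress.append(exported_media)
try:
    exporter.exportInto(out_path)  # hypothetical AnkiCollectionPackageExporter call
finally:
    hooks.legacy_export_progress.remove(exported_media)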
@@ -31,7 +31,16 @@ hooks = [
         args=["col: anki.collection.Collection", "ids: Sequence[anki.notes.NoteId]"],
         legacy_hook="remNotes",
     ),
-    Hook(name="media_files_did_export", args=["count: int"]),
+    Hook(
+        name="media_files_did_export",
+        args=["count: int"],
+        doc="Only used by legacy .apkg exporter. Will be deprecated in the future.",
+    ),
+    Hook(
+        name="legacy_export_progress",
+        args=["progress: str"],
+        doc="Temporary hook used in transition to new import/export code.",
+    ),
     Hook(
         name="exporters_list_created",
         args=["exporters: list[tuple[str, Any]]"],
@@ -160,20 +160,29 @@ class ExportDialog(QDialog):
             else:
                 os.unlink(file)

-        # progress handler
-        def exported_media(cnt: int) -> None:
+        # progress handler: old apkg exporter
+        def exported_media_count(cnt: int) -> None:
             self.mw.taskman.run_on_main(
                 lambda: self.mw.progress.update(
                     label=tr.exporting_exported_media_file(count=cnt)
                 )
             )

+        # progress handler: adaptor for new colpkg importer into old exporting screen.
+        # don't rename this; there's a hack in pylib/exporting.py that assumes this
+        # name
+        def exported_media(progress: str) -> None:
+            self.mw.taskman.run_on_main(
+                lambda: self.mw.progress.update(label=progress)
+            )
+
         def do_export() -> None:
             self.exporter.exportInto(file)

         def on_done(future: Future) -> None:
             self.mw.progress.finish()
-            hooks.media_files_did_export.remove(exported_media)
+            hooks.media_files_did_export.remove(exported_media_count)
+            hooks.legacy_export_progress.remove(exported_media)
             try:
                 # raises if exporter failed
                 future.result()
@@ -186,7 +195,8 @@ class ExportDialog(QDialog):
         if self.isVerbatim:
             gui_hooks.collection_will_temporarily_close(self.mw.col)
         self.mw.progress.start()
-        hooks.media_files_did_export.append(exported_media)
+        hooks.media_files_did_export.append(exported_media_count)
+        hooks.legacy_export_progress.append(exported_media)

         self.mw.taskman.run_in_background(do_export, on_done)

@@ -246,7 +246,7 @@ class ApkgExporter(Exporter):

 def export_progress_update(progress: Progress, update: ProgressUpdate) -> None:
     if not progress.HasField("exporting"):
-        return None
-    update.label = tr.exporting_exported_media_file(count=progress.exporting)
+        return
+    update.label = progress.exporting
     if update.user_wants_abort:
         update.abort = True
@@ -9,7 +9,7 @@ use crate::{
     backend_proto::{self as pb, export_anki_package_request::Selector},
     import_export::{
         package::{import_colpkg, NoteLog},
-        ImportProgress,
+        ExportProgress, ImportProgress,
     },
     prelude::*,
     search::SearchNode,
@@ -95,7 +95,7 @@ impl Backend {
         move |progress, throttle| handler.update(Progress::Import(progress), throttle)
     }

-    fn export_progress_fn(&self) -> impl FnMut(usize, bool) -> bool {
+    fn export_progress_fn(&self) -> impl FnMut(ExportProgress, bool) -> bool {
         let mut handler = self.new_progress_handler();
         move |progress, throttle| handler.update(Progress::Export(progress), throttle)
     }
@@ -10,7 +10,7 @@ use crate::{
     backend_proto as pb,
     dbcheck::DatabaseCheckProgress,
     i18n::I18n,
-    import_export::ImportProgress,
+    import_export::{ExportProgress, ImportProgress},
     media::sync::MediaSyncProgress,
     sync::{FullSyncProgress, NormalSyncProgress, SyncStage},
 };
@@ -52,7 +52,7 @@ pub(super) enum Progress {
     NormalSync(NormalSyncProgress),
     DatabaseCheck(DatabaseCheckProgress),
     Import(ImportProgress),
-    Export(usize),
+    Export(ExportProgress),
 }

 pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Progress {
@@ -117,7 +117,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Progress {
                 }
                 .into(),
             ),
-            Progress::Export(progress) => pb::progress::Value::Exporting(progress as u32),
+            Progress::Export(progress) => pb::progress::Value::Exporting(
+                match progress {
+                    ExportProgress::File => tr.exporting_exporting_file(),
+                    ExportProgress::Media(n) => tr.exporting_processed_media_files(n),
+                    ExportProgress::Notes(n) => tr.importing_processed_notes(n),
+                    ExportProgress::Gathering => tr.importing_gathering(),
+                }
+                .into(),
+            ),
         }
     } else {
         pb::progress::Value::None(pb::Empty {})
@@ -5,6 +5,7 @@ use std::collections::{HashMap, HashSet};

 use itertools::Itertools;

+use super::{ExportProgress, IncrementableProgress};
 use crate::{
     decks::immediate_parent_name,
     io::filename_is_safe,
@@ -52,19 +53,25 @@ impl ExchangeData {
         col.storage.clear_searched_cards_table()
     }

-    pub(super) fn gather_media_names(&mut self) {
+    pub(super) fn gather_media_names(
+        &mut self,
+        progress: &mut IncrementableProgress<ExportProgress>,
+    ) -> Result<()> {
         let mut inserter = |name: String| {
             if filename_is_safe(&name) {
                 self.media_filenames.insert(name);
             }
         };
+        let mut progress = progress.incrementor(ExportProgress::Notes);
         let svg_getter = svg_getter(&self.notetypes);
         for note in self.notes.iter() {
+            progress.increment()?;
             gather_media_names_from_note(note, &mut inserter, &svg_getter);
         }
         for notetype in self.notetypes.iter() {
             gather_media_names_from_notetype(notetype, &mut inserter);
         }
+        Ok(())
     }

     fn remove_scheduling_information(&mut self, col: &Collection) {
@@ -19,6 +19,14 @@ pub enum ImportProgress {
     Notes(usize),
 }

+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum ExportProgress {
+    File,
+    Gathering,
+    Notes(usize),
+    Media(usize),
+}
+
 /// Wrapper around a progress function, usually passed by the [crate::backend::Backend],
 /// to make repeated calls more ergonomic.
 pub(crate) struct IncrementableProgress<P>(Box<dyn FnMut(P, bool) -> bool>);
@@ -16,7 +16,7 @@ use crate::{
             colpkg::export::{export_collection, MediaIter},
             Meta,
         },
-        IncrementableProgress,
+        ExportProgress, IncrementableProgress,
     },
     io::{atomic_rename, tempfile_in_parent_of},
     prelude::*,
@@ -33,9 +33,10 @@ impl Collection {
         with_media: bool,
         legacy: bool,
         media_fn: Option<Box<dyn FnOnce(HashSet<String>) -> MediaIter>>,
-        progress_fn: impl 'static + FnMut(usize, bool) -> bool,
+        progress_fn: impl 'static + FnMut(ExportProgress, bool) -> bool,
     ) -> Result<usize> {
         let mut progress = IncrementableProgress::new(progress_fn);
+        progress.call(ExportProgress::File)?;
         let temp_apkg = tempfile_in_parent_of(out_path.as_ref())?;
         let mut temp_col = NamedTempFile::new()?;
         let temp_col_path = temp_col
@@ -51,10 +52,12 @@
             &meta,
             temp_col_path,
             search,
+            &mut progress,
             with_scheduling,
             with_media,
         )?;

+        progress.call(ExportProgress::File)?;
         let media = if let Some(media_fn) = media_fn {
             media_fn(data.media_filenames)
         } else {
@@ -80,16 +83,19 @@ impl Collection {
         meta: &Meta,
         path: &str,
         search: impl TryIntoSearch,
+        progress: &mut IncrementableProgress<ExportProgress>,
         with_scheduling: bool,
         with_media: bool,
     ) -> Result<ExchangeData> {
         let mut data = ExchangeData::default();
+        progress.call(ExportProgress::Gathering)?;
         data.gather_data(self, search, with_scheduling)?;
         if with_media {
-            data.gather_media_names();
+            data.gather_media_names(progress)?;
         }

         let mut temp_col = Collection::new_minimal(path)?;
+        progress.call(ExportProgress::File)?;
         temp_col.insert_data(&data)?;
         temp_col.set_creation_stamp(self.storage.creation_stamp()?)?;
         temp_col.set_creation_utc_offset(data.creation_utc_offset)?;
@@ -22,7 +22,7 @@ use zstd::{
 use super::super::{MediaEntries, MediaEntry, Meta, Version};
 use crate::{
     collection::CollectionBuilder,
-    import_export::IncrementableProgress,
+    import_export::{ExportProgress, IncrementableProgress},
     io::{atomic_rename, read_dir_files, tempfile_in_parent_of},
     media::files::filename_if_normalized,
     prelude::*,
@@ -40,9 +40,10 @@ impl Collection {
         out_path: impl AsRef<Path>,
         include_media: bool,
         legacy: bool,
-        progress_fn: impl 'static + FnMut(usize, bool) -> bool,
+        progress_fn: impl 'static + FnMut(ExportProgress, bool) -> bool,
     ) -> Result<()> {
         let mut progress = IncrementableProgress::new(progress_fn);
+        progress.call(ExportProgress::File)?;
         let colpkg_name = out_path.as_ref();
         let temp_colpkg = tempfile_in_parent_of(colpkg_name)?;
         let src_path = self.col_path.clone();
@@ -105,7 +106,7 @@ fn export_collection_file(
     media_dir: Option<PathBuf>,
     legacy: bool,
     tr: &I18n,
-    progress: &mut IncrementableProgress<usize>,
+    progress: &mut IncrementableProgress<ExportProgress>,
 ) -> Result<()> {
     let meta = if legacy {
         Meta::new_legacy()
@@ -148,8 +149,9 @@ pub(crate) fn export_collection(
     col_size: usize,
     media: MediaIter,
     tr: &I18n,
-    progress: &mut IncrementableProgress<usize>,
+    progress: &mut IncrementableProgress<ExportProgress>,
 ) -> Result<()> {
+    progress.call(ExportProgress::File)?;
     let out_file = File::create(&out_path)?;
     let mut zip = ZipWriter::new(out_file);

@@ -234,7 +236,7 @@ fn write_media(
     meta: &Meta,
     zip: &mut ZipWriter<File>,
     media: MediaIter,
-    progress: &mut IncrementableProgress<usize>,
+    progress: &mut IncrementableProgress<ExportProgress>,
 ) -> Result<()> {
     let mut media_entries = vec![];
     write_media_files(meta, zip, media, &mut media_entries, progress)?;
@@ -278,10 +280,10 @@ fn write_media_files(
     zip: &mut ZipWriter<File>,
     media: MediaIter,
     media_entries: &mut Vec<MediaEntry>,
-    progress: &mut IncrementableProgress<usize>,
+    progress: &mut IncrementableProgress<ExportProgress>,
 ) -> Result<()> {
     let mut copier = MediaCopier::new(meta);
-    let mut incrementor = progress.incrementor(|u| u);
+    let mut incrementor = progress.incrementor(ExportProgress::Media);
     for (index, res) in media.0.enumerate() {
         incrementor.increment()?;
         let path = res?;