Mirror of https://github.com/ankitects/anki.git
Handle zstd-compressed apkg
commit ede9698aea
parent 766396809d
2 changed files with 28 additions and 8 deletions
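In short: when a package's metadata reports zstd compression, the importer now streams the embedded collection through zstd::stream::copy_decode instead of copying it verbatim, and falls back to a plain io::copy for legacy packages. Below is a minimal standalone sketch of that decision, mirroring the diff that follows; PackageMeta and extract_collection are stand-in names for illustration, not Anki's actual Meta API, and error handling is simplified to Box<dyn Error>:

    use std::{fs::File, io};

    use tempfile::NamedTempFile;
    use zip::ZipArchive;
    use zstd::stream::copy_decode;

    // Stand-in for the package metadata; the real importer reads this from the
    // archive itself (Meta::from_archive in the diff below).
    struct PackageMeta {
        zstd_compressed: bool,
        collection_filename: &'static str,
    }

    // Copy the embedded collection into a temp file, decompressing if needed.
    fn extract_collection(
        meta: &PackageMeta,
        archive: &mut ZipArchive<File>,
    ) -> Result<NamedTempFile, Box<dyn std::error::Error>> {
        let mut zip_file = archive.by_name(meta.collection_filename)?;
        let mut tempfile = NamedTempFile::new()?;
        if meta.zstd_compressed {
            // Newer packages: stream the zstd frame straight into the temp file.
            copy_decode(zip_file, &mut tempfile)?;
        } else {
            // Legacy packages: the entry is stored uncompressed; copy it as-is.
            io::copy(&mut zip_file, &mut tempfile)?;
        }
        Ok(tempfile)
    }

A real caller would open the .apkg with File::open and ZipArchive::new, and derive the metadata from the package's own meta entry, as the diff's Meta::from_archive does.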
@@ -59,7 +59,12 @@ impl Context<'_> {
     pub(super) fn prepare_media(&mut self) -> Result<MediaUseMap> {
         let progress_fn = |u| (&mut self.progress_fn)(ImportProgress::MediaCheck(u)).is_ok();
         let existing_sha1s = self.target_col.all_existing_sha1s(progress_fn)?;
-        prepare_media(&mut self.archive, &existing_sha1s, &mut self.progress_fn)
+        prepare_media(
+            &self.meta,
+            &mut self.archive,
+            &existing_sha1s,
+            &mut self.progress_fn,
+        )
     }

     pub(super) fn copy_media(&mut self, media_map: &mut MediaUseMap) -> Result<()> {
@@ -84,6 +89,7 @@ impl Collection {
 }

 fn prepare_media(
+    meta: &Meta,
     archive: &mut ZipArchive<File>,
     existing_sha1s: &HashMap<String, [u8; 20]>,
     progress_fn: &mut impl FnMut(ImportProgress) -> Result<()>,
@@ -91,7 +97,7 @@ fn prepare_media(
     let mut media_map = MediaUseMap::default();
     let mut progress = ProgressHandler::new(|u| progress_fn(ImportProgress::MediaCheck(u)));

-    for mut entry in extract_media_entries(&Meta::new_legacy(), archive)? {
+    for mut entry in extract_media_entries(meta, archive)? {
         progress.increment()?;

         if entry.is_static() {
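The three hunks above are the media-import side of the change: the package metadata is now threaded into extract_media_entries instead of hard-coding Meta::new_legacy(), so media entries are interpreted according to the package actually being imported. The remaining hunks are in the second changed file, which reads the metadata once and stores it on the import Context. Purely as an illustration (this is not code from the diff, and whether individual media files are zstd-compressed is an assumption of the sketch, not something the diff shows), a metadata-driven choice of reader might look like:

    use std::io::{self, Read};

    // Stand-in metadata flag; made up for this sketch.
    struct PackageMeta {
        zstd_compressed: bool,
    }

    // Hypothetical helper: wrap one archive entry in a reader that
    // transparently decompresses zstd when the package metadata says so.
    fn maybe_decompressed_reader<'a>(
        meta: &PackageMeta,
        entry: impl Read + 'a,
    ) -> io::Result<Box<dyn Read + 'a>> {
        if meta.zstd_compressed {
            Ok(Box::new(zstd::stream::read::Decoder::new(entry)?))
        } else {
            Ok(Box::new(entry))
        }
    }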
@@ -12,6 +12,7 @@ pub(crate) use notes::NoteMeta;
 use rusqlite::OptionalExtension;
 use tempfile::NamedTempFile;
 use zip::ZipArchive;
+use zstd::stream::copy_decode;

 use crate::{
     collection::CollectionBuilder,
@@ -29,6 +30,7 @@ type ProgressFn = dyn FnMut(ImportProgress) -> Result<()>;
 struct Context<'a> {
     target_col: &'a mut Collection,
     archive: ZipArchive<File>,
+    meta: Meta,
     data: ExchangeData,
     usn: Usn,
     progress_fn: &'a mut ProgressFn,
@@ -57,12 +59,18 @@ impl<'a> Context<'a> {
         target_col: &'a mut Collection,
         progress_fn: &'a mut ProgressFn,
     ) -> Result<Self> {
-        let data =
-            ExchangeData::gather_from_archive(&mut archive, SearchNode::WholeCollection, true)?;
+        let meta = Meta::from_archive(&mut archive)?;
+        let data = ExchangeData::gather_from_archive(
+            &mut archive,
+            &meta,
+            SearchNode::WholeCollection,
+            true,
+        )?;
         let usn = target_col.usn()?;
         Ok(Self {
             target_col,
             archive,
+            meta,
             data,
             usn,
             progress_fn,
@@ -82,10 +90,11 @@ impl<'a> Context<'a> {
 impl ExchangeData {
     fn gather_from_archive(
         archive: &mut ZipArchive<File>,
+        meta: &Meta,
         search: impl TryIntoSearch,
         with_scheduling: bool,
     ) -> Result<Self> {
-        let tempfile = collection_to_tempfile(archive)?;
+        let tempfile = collection_to_tempfile(meta, archive)?;
         let mut col = CollectionBuilder::new(tempfile.path()).build()?;
         col.maybe_upgrade_scheduler()?;

@@ -96,11 +105,16 @@ impl ExchangeData {
     }
 }

-fn collection_to_tempfile(archive: &mut ZipArchive<File>) -> Result<NamedTempFile> {
-    let meta = Meta::from_archive(archive)?;
+fn collection_to_tempfile(meta: &Meta, archive: &mut ZipArchive<File>) -> Result<NamedTempFile> {
     let mut zip_file = archive.by_name(meta.collection_filename())?;
     let mut tempfile = NamedTempFile::new()?;
-    io::copy(&mut zip_file, &mut tempfile)?;
+    if meta.zstd_compressed() {
+        copy_decode(zip_file, &mut tempfile)
+    } else {
+        io::copy(&mut zip_file, &mut tempfile).map(|_| ())
+    }
+    .map_err(|err| AnkiError::file_io_error(err, tempfile.path()))?;
+
     Ok(tempfile)
 }

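One small design note on the last hunk: the if/else is used as an expression, with both arms producing io::Result<()> (hence the .map(|_| ()) on io::copy, which otherwise returns a byte count), so a single .map_err turns either failure into the importer's file I/O error. A tiny standalone illustration of that pattern, with made-up names and a plain String error in place of AnkiError:

    use std::io::{self, Read, Write};

    // Both branches evaluate to io::Result<()>, so one error-mapping step
    // covers the compressed and the plain path alike.
    fn copy_maybe_compressed(
        compressed: bool,
        mut src: impl Read,
        mut dst: impl Write,
    ) -> Result<(), String> {
        if compressed {
            zstd::stream::copy_decode(&mut src, &mut dst)
        } else {
            // io::copy yields the number of bytes copied; discard it so the
            // branch types match.
            io::copy(&mut src, &mut dst).map(|_| ())
        }
        .map_err(|err| format!("copying collection failed: {err}"))
    }

    fn main() {
        let mut out = Vec::new();
        copy_maybe_compressed(false, &[1u8, 2, 3][..], &mut out).unwrap();
        assert_eq!(out, vec![1, 2, 3]);
    }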