Alias [u8; 20] with Sha1Hash

parent f2988f8054
commit 382d7417a8

9 changed files with 22 additions and 22 deletions
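The change itself is mechanical: the media module gains a Sha1Hash type alias for the raw 20-byte SHA-1 digest, the prelude re-exports it, and signatures across the crate swap [u8; 20] for the alias. A minimal sketch of why this is a zero-cost rename (the schema_hash_len helper below is hypothetical, added only for illustration):

    // A type alias introduces a new name, not a new type, so existing
    // [u8; 20] values keep flowing through unchanged call sites.
    pub type Sha1Hash = [u8; 20];

    // Hypothetical helper, not part of the commit.
    fn schema_hash_len(hash: Sha1Hash) -> usize {
        hash.len()
    }

    fn main() {
        let raw: [u8; 20] = [0; 20];
        // The alias and the raw array type are interchangeable.
        let aliased: Sha1Hash = raw;
        assert_eq!(schema_hash_len(aliased), 20);
    }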
@@ -82,7 +82,7 @@ impl Collection {
     fn all_existing_sha1s(
         &mut self,
         progress_fn: impl FnMut(usize) -> bool,
-    ) -> Result<HashMap<String, [u8; 20]>> {
+    ) -> Result<HashMap<String, Sha1Hash>> {
         let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
         mgr.all_checksums(progress_fn, &self.log)
     }
@@ -91,7 +91,7 @@ impl Collection {
 fn prepare_media(
     meta: &Meta,
     archive: &mut ZipArchive<File>,
-    existing_sha1s: &HashMap<String, [u8; 20]>,
+    existing_sha1s: &HashMap<String, Sha1Hash>,
     progress_fn: &mut impl FnMut(ImportProgress) -> Result<()>,
 ) -> Result<MediaUseMap> {
     let mut media_map = MediaUseMap::default();
@@ -297,7 +297,7 @@ impl<'n> NoteContext<'n> {
 }
 
 impl Notetype {
-    fn schema_hash(&self) -> [u8; 20] {
+    fn schema_hash(&self) -> Sha1Hash {
         let mut hasher = Sha1::new();
         for field in &self.fields {
             hasher.update(field.name.as_bytes());
@@ -346,7 +346,7 @@ impl MediaCopier {
         &mut self,
         reader: &mut impl Read,
         writer: &mut impl Write,
-    ) -> Result<(usize, [u8; 20])> {
+    ) -> Result<(usize, Sha1Hash)> {
         let mut size = 0;
         let mut hasher = Sha1::new();
         let mut buf = [0; 64 * 1024];
@@ -26,7 +26,7 @@ use crate::{
 pub(super) struct SafeMediaEntry {
     pub(super) name: String,
     pub(super) size: u32,
-    pub(super) sha1: [u8; 20],
+    pub(super) sha1: Sha1Hash,
     pub(super) index: usize,
 }
 
@@ -4,7 +4,6 @@
 use std::{collections::HashMap, path::Path, time};
 
 use crate::{
-    error::{AnkiError, Result},
     log::{debug, Logger},
     media::{
         database::{MediaDatabaseContext, MediaEntry},
@@ -13,11 +12,12 @@ use crate::{
             NONSYNCABLE_FILENAME,
         },
     },
+    prelude::*,
 };
 
 struct FilesystemEntry {
     fname: String,
-    sha1: Option<[u8; 20]>,
+    sha1: Option<Sha1Hash>,
     mtime: i64,
     is_new: bool,
 }
@@ -47,7 +47,7 @@ fn initial_db_setup(db: &mut Connection) -> Result<()> {
 pub struct MediaEntry {
     pub fname: String,
     /// If None, file has been deleted
-    pub sha1: Option<[u8; 20]>,
+    pub sha1: Option<Sha1Hash>,
     // Modification time; 0 if deleted
     pub mtime: i64,
     /// True if changed since last sync
@@ -223,7 +223,7 @@ delete from media where fname=?"
     }
 
     /// Returns all filenames and checksums, where the checksum is not null.
-    pub(super) fn all_checksums(&mut self) -> Result<HashMap<String, [u8; 20]>> {
+    pub(super) fn all_checksums(&mut self) -> Result<HashMap<String, Sha1Hash>> {
         self.db
             .prepare("SELECT fname, csum FROM media WHERE csum IS NOT NULL")?
             .query_and_then([], row_to_name_and_checksum)?
@@ -258,7 +258,7 @@ fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
     })
 }
 
-fn row_to_name_and_checksum(row: &Row) -> Result<(String, [u8; 20])> {
+fn row_to_name_and_checksum(row: &Row) -> Result<(String, Sha1Hash)> {
     let file_name = row.get(0)?;
     let sha1_str: String = row.get(1)?;
     let mut sha1 = [0; 20];
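The media database stores each checksum as a 40-character hex string (the csum column selected above), so row_to_name_and_checksum has to decode it back into the 20-byte array; the decode itself is cut off by this hunk. A sketch of one plausible way to finish the conversion, using the hex crate purely for illustration (the real code may decode differently):

    // Decode a 40-char hex checksum into the 20-byte digest.
    // checksum_from_hex is a hypothetical stand-in for the truncated body.
    type Sha1Hash = [u8; 20];

    fn checksum_from_hex(sha1_str: &str) -> Result<Sha1Hash, hex::FromHexError> {
        let mut sha1 = [0; 20];
        hex::decode_to_slice(sha1_str, &mut sha1)?;
        Ok(sha1)
    }

    fn main() {
        let digest = checksum_from_hex(&"ab".repeat(20)).unwrap();
        assert_eq!(digest, [0xab; 20]);
    }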
@@ -15,10 +15,7 @@ use sha1::Sha1;
 use unic_ucd_category::GeneralCategory;
 use unicode_normalization::{is_nfc, UnicodeNormalization};
 
-use crate::{
-    error::{AnkiError, Result},
-    log::{debug, Logger},
-};
+use crate::prelude::*;
 
 /// The maximum length we allow a filename to be. When combined
 /// with the rest of the path, the full path needs to be under ~240 chars
@@ -164,7 +161,7 @@ pub fn add_data_to_folder_uniquely<'a, P>(
     folder: P,
     desired_name: &'a str,
     data: &[u8],
-    sha1: [u8; 20],
+    sha1: Sha1Hash,
 ) -> io::Result<Cow<'a, str>>
 where
     P: AsRef<Path>,
@@ -194,7 +191,7 @@ where
 }
 
 /// Convert foo.jpg into foo-abcde12345679.jpg
-pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &[u8; 20]) -> String {
+pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> String {
     // when appending a hash to make unique, it will be 40 bytes plus the hyphen.
     let max_len = MAX_FILENAME_LENGTH - 40 - 1;
 
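The doc comment spells out the renaming scheme: the 40-character hex digest plus a hyphen is spliced in before the extension, and the stem is truncated so the result stays under MAX_FILENAME_LENGTH. A rough self-contained sketch of that splice (truncation omitted; add_hash_suffix is an illustrative stand-in, not the real function body):

    type Sha1Hash = [u8; 20];

    // Turn foo.jpg into foo-<40 hex chars>.jpg.
    fn add_hash_suffix(fname: &str, hash: &Sha1Hash) -> String {
        let hex: String = hash.iter().map(|b| format!("{:02x}", b)).collect();
        match fname.rsplit_once('.') {
            Some((stem, ext)) => format!("{}-{}.{}", stem, hex, ext),
            None => format!("{}-{}", fname, hex),
        }
    }

    fn main() {
        let renamed = add_hash_suffix("foo.jpg", &[0xab; 20]);
        assert!(renamed.starts_with("foo-") && renamed.ends_with(".jpg"));
        assert_eq!(renamed.len(), "foo.jpg".len() + 41);
    }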
@@ -267,7 +264,7 @@ fn truncate_to_char_boundary(s: &str, mut max: usize) -> &str {
 }
 
 /// Return the SHA1 of a file if it exists, or None.
-fn existing_file_sha1(path: &Path) -> io::Result<Option<[u8; 20]>> {
+fn existing_file_sha1(path: &Path) -> io::Result<Option<Sha1Hash>> {
     match sha1_of_file(path) {
         Ok(o) => Ok(Some(o)),
         Err(e) => {
@@ -281,13 +278,13 @@ fn existing_file_sha1(path: &Path) -> io::Result<Option<[u8; 20]>> {
 }
 
 /// Return the SHA1 of a file, failing if it doesn't exist.
-pub(crate) fn sha1_of_file(path: &Path) -> io::Result<[u8; 20]> {
+pub(crate) fn sha1_of_file(path: &Path) -> io::Result<Sha1Hash> {
     let mut file = fs::File::open(path)?;
     sha1_of_reader(&mut file)
 }
 
 /// Return the SHA1 of a stream.
-pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result<[u8; 20]> {
+pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result<Sha1Hash> {
     let mut hasher = Sha1::new();
     let mut buf = [0; 64 * 1024];
     loop {
@@ -307,7 +304,7 @@ pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result<[u8; 20]> {
 }
 
 /// Return the SHA1 of provided data.
-pub(crate) fn sha1_of_data(data: &[u8]) -> [u8; 20] {
+pub(crate) fn sha1_of_data(data: &[u8]) -> Sha1Hash {
     let mut hasher = Sha1::new();
     hasher.update(data);
     hasher.digest().bytes()
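These helpers share one hashing idiom: feed bytes into a Sha1 hasher and return the raw digest. A self-contained sketch of the streaming variant over a 64 KiB buffer, assuming the pre-0.10 sha1 crate API visible in this diff (Sha1::new / update / digest().bytes()); the loop body is a plausible completion of the truncated hunk, and the real function may also retry on ErrorKind::Interrupted:

    use std::io::{self, Read};

    use sha1::Sha1;

    type Sha1Hash = [u8; 20];

    // Hash an arbitrary reader in 64 KiB chunks.
    fn sha1_of_reader(reader: &mut impl Read) -> io::Result<Sha1Hash> {
        let mut hasher = Sha1::new();
        let mut buf = [0; 64 * 1024];
        loop {
            let n = reader.read(&mut buf)?;
            if n == 0 {
                break;
            }
            hasher.update(&buf[..n]);
        }
        Ok(hasher.digest().bytes())
    }

    fn main() -> io::Result<()> {
        let digest = sha1_of_reader(&mut "hello world".as_bytes())?;
        assert_eq!(digest.len(), 20);
        Ok(())
    }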
@@ -376,7 +373,7 @@ pub(super) fn trash_folder(media_folder: &Path) -> Result<PathBuf> {
 
 pub(super) struct AddedFile {
     pub fname: String,
-    pub sha1: [u8; 20],
+    pub sha1: Sha1Hash,
     pub mtime: i64,
     pub renamed_from: Option<String>,
 }
@@ -26,6 +26,8 @@ pub mod database;
 pub mod files;
 pub mod sync;
 
+pub type Sha1Hash = [u8; 20];
+
 pub struct MediaManager {
     db: Connection,
     media_folder: PathBuf,
@@ -160,7 +162,7 @@ impl MediaManager {
         &self,
         progress: impl FnMut(usize) -> bool,
         log: &Logger,
-    ) -> Result<HashMap<String, [u8; 20]>> {
+    ) -> Result<HashMap<String, Sha1Hash>> {
         let mut dbctx = self.dbctx();
         ChangeTracker::new(&self.media_folder, progress, log).register_changes(&mut dbctx)?;
         dbctx.all_checksums()
@@ -12,6 +12,7 @@ pub use crate::{
     decks::{Deck, DeckId, DeckKind, NativeDeckName},
     error::{AnkiError, Result},
     i18n::I18n,
+    media::Sha1Hash,
     notes::{Note, NoteId},
     notetype::{Notetype, NotetypeId},
     ops::{Op, OpChanges, OpOutput},
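With the alias re-exported from the prelude, modules that already glob-import the prelude (as one earlier hunk switches to doing) pick up Sha1Hash without a dedicated import. A tiny self-contained illustration of that re-export pattern (module names are simplified stand-ins, not the real Anki layout):

    // Stand-in modules, illustration only.
    mod media {
        pub type Sha1Hash = [u8; 20];
    }

    mod prelude {
        pub use crate::media::Sha1Hash;
    }

    use crate::prelude::*;

    fn main() {
        let digest: Sha1Hash = [0; 20];
        assert_eq!(digest.len(), 20);
    }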