mirror of https://github.com/ankitects/anki.git
backend init can now fail, and update media db when file is added
- Adding files inside Anki now updates the media DB, so a full file scan at
  sync time is no longer required if no other changes have been made.
- Use a protobuf message for backend initialization, and return a string
  error if initialization fails.
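Packing the paths into a single protobuf message keeps the Python/Rust boundary down to one bytes argument, and an init failure now surfaces as an ordinary Python exception carrying the error string. As a minimal caller-side sketch only (not code from this commit: it assumes the generated protobuf module is importable as anki.backend_pb2, and open_backend is a hypothetical helper), the new init flow looks roughly like this:

    import ankirspy                        # compiled Rust extension, as used by anki/rsbackend.py
    import anki.backend_pb2 as pb          # generated from backend.proto (assumed module name)
    from anki.media import media_paths_from_col_path

    def open_backend(col_path: str):
        # derive the media folder and media DB paths the same way storage.Collection() now does
        media_folder, media_db = media_paths_from_col_path(col_path)
        init_msg = pb.BackendInit(
            collection_path=col_path,
            media_folder_path=media_folder,
            media_db_path=media_db,
        )
        try:
            # the Rust side decodes the message; on failure it raises with the returned error string
            return ankirspy.Backend(init_msg.SerializeToString())
        except Exception as exc:
            raise RuntimeError(f"backend failed to initialize: {exc}") from exc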
parent 96f0a5cc3c
commit 01470c4854
8 changed files with 142 additions and 46 deletions
@@ -4,6 +4,12 @@ package backend_proto;
 
 message Empty {}
 
+message BackendInit {
+    string collection_path = 1;
+    string media_folder_path = 2;
+    string media_db_path = 3;
+}
+
 // 1-15 reserved for future use; 2047 for errors
 
 message BackendInput {

@@ -23,8 +23,10 @@ from anki.latex import render_latex
 from anki.utils import checksum, isMac
 
 
-def media_folder_from_col_path(col_path: str) -> str:
-    return re.sub(r"(?i)\.(anki2)$", ".media", col_path)
+def media_paths_from_col_path(col_path: str) -> Tuple[str, str]:
+    media_folder = re.sub(r"(?i)\.(anki2)$", ".media", col_path)
+    media_db = media_folder + ".db2"
+    return (media_folder, media_db)
 
 
 class MediaManager:
@@ -45,7 +47,7 @@ class MediaManager:
             self._dir = None
             return
         # media directory
-        self._dir = media_folder_from_col_path(self.col.path)
+        self._dir = media_paths_from_col_path(self.col.path)[0]
         if not os.path.exists(self._dir):
             os.makedirs(self._dir)
         try:
@@ -63,7 +65,7 @@ class MediaManager:
     def connect(self) -> None:
         if self.col.server:
             return
-        path = self.dir() + ".db2"
+        path = media_paths_from_col_path(self.col.path)[1]
         create = not os.path.exists(path)
         os.chdir(self._dir)
         self.db = DB(path)

@@ -90,8 +90,13 @@ def proto_replacement_list_to_native(
 
 
 class RustBackend:
-    def __init__(self, col_path: str, media_folder: str):
-        self._backend = ankirspy.Backend(col_path, media_folder)
+    def __init__(self, col_path: str, media_folder_path: str, media_db_path: str):
+        init_msg = pb.BackendInit(
+            collection_path=col_path,
+            media_folder_path=media_folder_path,
+            media_db_path=media_db_path,
+        )
+        self._backend = ankirspy.Backend(init_msg.SerializeToString())
 
     def _run_command(self, input: pb.BackendInput) -> pb.BackendOutput:
         input_bytes = input.SerializeToString()

@@ -11,7 +11,7 @@ from anki.collection import _Collection
 from anki.consts import *
 from anki.db import DB
 from anki.lang import _
-from anki.media import media_folder_from_col_path
+from anki.media import media_paths_from_col_path
 from anki.rsbackend import RustBackend
 from anki.stdmodels import (
     addBasicModel,
@@ -31,8 +31,9 @@ def Collection(
     path: str, lock: bool = True, server: Optional[ServerData] = None, log: bool = False
 ) -> _Collection:
     "Open a new or existing collection. Path must be unicode."
-    backend = RustBackend(path, media_folder_from_col_path(path))
+    assert path.endswith(".anki2")
+    (media_dir, media_db) = media_paths_from_col_path(path)
+    backend = RustBackend(path, media_dir, media_db)
     path = os.path.abspath(path)
     create = not os.path.exists(path)
     if create:

@@ -6,7 +6,7 @@ use crate::backend_proto::backend_input::Value;
 use crate::backend_proto::RenderedTemplateReplacement;
 use crate::cloze::expand_clozes_to_reveal_latex;
 use crate::err::{AnkiError, Result};
-use crate::media::files::add_data_to_folder_uniquely;
+use crate::media::MediaManager;
 use crate::sched::{local_minutes_west_for_stamp, sched_timing_today};
 use crate::template::{
     render_card, without_legacy_template_directives, FieldMap, FieldRequirements, ParsedTemplate,
@@ -20,7 +20,7 @@ use std::path::PathBuf;
 pub struct Backend {
     #[allow(dead_code)]
     col_path: PathBuf,
-    media_folder: PathBuf,
+    media_manager: Option<MediaManager>,
 }
 
 /// Convert an Anki error to a protobuf error.
@@ -47,12 +47,33 @@ impl std::convert::From<AnkiError> for pt::backend_output::Value {
     }
 }
 
+pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
+    let input: pt::BackendInit = match pt::BackendInit::decode(init_msg) {
+        Ok(req) => req,
+        Err(_) => return Err("couldn't decode init request".into()),
+    };
+
+    match Backend::new(
+        &input.collection_path,
+        &input.media_folder_path,
+        &input.media_db_path,
+    ) {
+        Ok(backend) => Ok(backend),
+        Err(e) => Err(format!("{:?}", e)),
+    }
+}
+
 impl Backend {
-    pub fn new<P: Into<PathBuf>>(col_path: P, media_folder: P) -> Backend {
-        Backend {
+    pub fn new(col_path: &str, media_folder: &str, media_db: &str) -> Result<Backend> {
+        let media_manager = match (media_folder.is_empty(), media_db.is_empty()) {
+            (false, false) => Some(MediaManager::new(media_folder, media_db)?),
+            _ => None,
+        };
+
+        Ok(Backend {
             col_path: col_path.into(),
-            media_folder: media_folder.into(),
-        }
+            media_manager,
+        })
     }
 
     /// Decode a request, process it, and return the encoded result.
@@ -77,7 +98,7 @@ impl Backend {
         buf
     }
 
-    fn run_command(&self, input: pt::BackendInput) -> pt::BackendOutput {
+    fn run_command(&mut self, input: pt::BackendInput) -> pt::BackendOutput {
         let oval = if let Some(ival) = input.value {
             match self.run_command_inner(ival) {
                 Ok(output) => output,
@@ -91,7 +112,7 @@ impl Backend {
     }
 
     fn run_command_inner(
-        &self,
+        &mut self,
         ival: pt::backend_input::Value,
     ) -> Result<pt::backend_output::Value> {
         use pt::backend_output::Value as OValue;
@@ -230,11 +251,13 @@ impl Backend {
         }
     }
 
-    fn add_file_to_media_folder(&self, input: pt::AddFileToMediaFolderIn) -> Result<String> {
-        Ok(
-            add_data_to_folder_uniquely(&self.media_folder, &input.desired_name, &input.data)?
-                .into(),
-        )
+    fn add_file_to_media_folder(&mut self, input: pt::AddFileToMediaFolderIn) -> Result<String> {
+        Ok(self
+            .media_manager
+            .as_mut()
+            .unwrap()
+            .add_file(&input.desired_name, &input.data)?
+            .into())
     }
 }

@@ -10,7 +10,6 @@ use std::path::Path;
 pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
     let mut db = Connection::open(path)?;
 
-    db.pragma_update(None, "locking_mode", &"exclusive")?;
     db.pragma_update(None, "page_size", &4096)?;
     db.pragma_update(None, "legacy_file_format", &false)?;
     db.pragma_update(None, "journal", &"wal")?;

@@ -93,6 +93,7 @@ pub fn add_data_to_folder_uniquely<'a, P>(
     folder: P,
     desired_name: &'a str,
     data: &[u8],
+    sha1: [u8; 20],
 ) -> io::Result<Cow<'a, str>>
 where
     P: AsRef<Path>,
@@ -108,14 +109,13 @@ where
         return Ok(normalized_name);
     }
 
-    let data_hash = sha1_of_data(data);
-    if existing_file_hash.unwrap() == data_hash {
+    if existing_file_hash.unwrap() == sha1 {
         // existing file has same checksum, nothing to do
         return Ok(normalized_name);
     }
 
     // give it a unique name based on its hash
-    let hashed_name = add_hash_suffix_to_file_stem(normalized_name.as_ref(), &data_hash);
+    let hashed_name = add_hash_suffix_to_file_stem(normalized_name.as_ref(), &sha1);
     target_path.set_file_name(&hashed_name);
 
     fs::write(&target_path, data)?;
@@ -233,7 +233,69 @@ struct FilesystemEntry {
     is_new: bool,
 }
 
+fn mtime_as_i64<P: AsRef<Path>>(path: P) -> io::Result<i64> {
+    Ok(path
+        .as_ref()
+        .metadata()?
+        .modified()?
+        .duration_since(time::UNIX_EPOCH)
+        .unwrap()
+        .as_secs() as i64)
+}
+
 impl MediaManager {
+    /// Add a file to the media folder.
+    ///
+    /// If a file with differing contents already exists, a hash will be
+    /// appended to the name.
+    ///
+    /// Also notes the file in the media database.
+    pub fn add_file<'a>(&mut self, desired_name: &'a str, data: &[u8]) -> Result<Cow<'a, str>> {
+        let pre_add_folder_mtime = mtime_as_i64(&self.media_folder)?;
+
+        // add file to folder
+        let data_hash = sha1_of_data(data);
+        let chosen_fname =
+            add_data_to_folder_uniquely(&self.media_folder, desired_name, data, data_hash)?;
+        let file_mtime = mtime_as_i64(self.media_folder.join(chosen_fname.as_ref()))?;
+        let post_add_folder_mtime = mtime_as_i64(&self.media_folder)?;
+
+        // add to the media DB
+        self.transact(|ctx| {
+            let existing_entry = ctx.get_entry(&chosen_fname)?;
+            let new_sha1 = Some(data_hash);
+
+            let entry_update_required = match existing_entry {
+                Some(existing) if existing.sha1 == new_sha1 => false,
+                _ => true,
+            };
+
+            if entry_update_required {
+                ctx.set_entry(&MediaEntry {
+                    fname: chosen_fname.to_string(),
+                    sha1: new_sha1,
+                    mtime: file_mtime,
+                    sync_required: true,
+                })?;
+            }
+
+            let mut meta = ctx.get_meta()?;
+            if meta.folder_mtime == pre_add_folder_mtime {
+                // if media db was in sync with folder prior to this add,
+                // we can keep it in sync
+                meta.folder_mtime = post_add_folder_mtime;
+                ctx.set_meta(&meta)?;
+            } else {
+                // otherwise, leave it alone so that other pending changes
+                // get picked up later
+            }
+
+            Ok(())
+        })?;
+
+        Ok(chosen_fname)
+    }
+
     /// Note any added/changed/deleted files.
     ///
     /// In the future, we could register files in the media DB as they
@@ -241,18 +303,12 @@ impl MediaManager {
     /// folder scan could be skipped.
     pub fn register_changes(&mut self) -> Result<()> {
         // folder mtime unchanged?
-        let media_dir_modified = self
-            .media_folder
-            .metadata()?
-            .modified()?
-            .duration_since(time::UNIX_EPOCH)
-            .unwrap()
-            .as_secs() as i64;
+        let dirmod = mtime_as_i64(&self.media_folder)?;
         let mut meta = self.get_meta()?;
-        if media_dir_modified == meta.folder_mtime {
+        if dirmod == meta.folder_mtime {
            return Ok(());
         } else {
-            meta.folder_mtime = media_dir_modified;
+            meta.folder_mtime = dirmod;
         }
 
         let mtimes = self.query(|ctx| ctx.all_mtimes())?;
@@ -438,20 +494,22 @@ mod test {
         let dpath = dir.path();
 
         // no existing file case
+        let h1 = sha1_of_data("hello".as_bytes());
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes()).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes(), h1).unwrap(),
             "test.mp3"
         );
 
         // same contents case
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes()).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes(), h1).unwrap(),
            "test.mp3"
         );
 
         // different contents
+        let h2 = sha1_of_data("hello1".as_bytes());
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello1".as_bytes()).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", "hello1".as_bytes(), h2).unwrap(),
             "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"
         );
 

@@ -1,7 +1,7 @@
-use anki::backend::Backend as RustBackend;
+use anki::backend::{init_backend, Backend as RustBackend};
 use pyo3::prelude::*;
 use pyo3::types::PyBytes;
-use pyo3::wrap_pyfunction;
+use pyo3::{exceptions, wrap_pyfunction};
 
 #[pyclass]
 struct Backend {
@@ -16,18 +16,20 @@ fn buildhash() -> &'static str {
 #[pymethods]
 impl Backend {
     #[new]
-    fn init(obj: &PyRawObject, col_path: String, media_folder: String) {
-        obj.init({
-            Backend {
-                backend: RustBackend::new(col_path, media_folder),
+    fn init(obj: &PyRawObject, init_msg: &PyBytes) -> PyResult<()> {
+        match init_backend(init_msg.as_bytes()) {
+            Ok(backend) => {
+                obj.init({ Backend { backend } });
+                Ok(())
             }
-        });
+            Err(e) => Err(exceptions::Exception::py_err(e)),
+        }
     }
 
-    fn command(&mut self, py: Python, input: &PyBytes) -> PyResult<PyObject> {
+    fn command(&mut self, py: Python, input: &PyBytes) -> PyObject {
         let out_bytes = self.backend.run_command_bytes(input.as_bytes());
         let out_obj = PyBytes::new(py, &out_bytes);
-        Ok(out_obj.into())
+        out_obj.into()
     }
 }