make logging explicit, and support logging to a file
parent 7482904735 · commit ba4fe71eba
17 changed files with 260 additions and 100 deletions
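The change swaps the implicit global `log`/`env_logger` setup for an explicit `slog` Logger that is created once at backend init and threaded through as a field or argument, optionally writing to a file. Output is still filtered by `RUST_LOG` (via the `slog-envlogger` dependency added below), which is why the Python layer temporarily forces `RUST_LOG=warn,anki::media=debug`. A minimal, self-contained sketch of the slog pattern the diff adopts (illustrative only; the real construction lives in the new rslib/src/log.rs further down):

use slog::{debug, o, Drain, Logger};

fn main() {
    // Terminal drain filtered by RUST_LOG, mirroring rslib/src/log.rs below.
    let decorator = slog_term::TermDecorator::new().build();
    let drain = slog_term::FullFormat::new(decorator).build().fuse();
    let drain = slog_envlogger::new(drain);
    let drain = slog_async::Async::new(drain).build().fuse();
    let log = Logger::root(drain, o!());

    // slog takes the logger explicitly and records structured key/value
    // pairs instead of interpolated format strings.
    debug!(log, "begin media sync"; "endpoint" => "https://example.invalid/");
}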
@@ -12,6 +12,7 @@ message BackendInit {
     string media_db_path = 3;
     repeated string preferred_langs = 4;
     string locale_folder_path = 5;
+    string log_path = 6;
 }

 message I18nBackendInit {
@@ -179,7 +179,7 @@ def proto_progress_to_native(progress: pb.Progress) -> Progress:

 class RustBackend:
     def __init__(
-        self, col_path: str, media_folder_path: str, media_db_path: str
+        self, col_path: str, media_folder_path: str, media_db_path: str, log_path: str
     ) -> None:
         ftl_folder = os.path.join(anki.lang.locale_folder, "fluent")
         init_msg = pb.BackendInit(
@@ -188,6 +188,7 @@ class RustBackend:
             media_db_path=media_db_path,
             locale_folder_path=ftl_folder,
             preferred_langs=[anki.lang.currentLang],
+            log_path=log_path,
         )
         self._backend = ankirspy.open_backend(init_msg.SerializeToString())
         self._backend.set_progress_callback(self._on_progress)
@@ -382,3 +383,8 @@ class I18nBackend:
         return self._backend.translate(
             translate_string_in(key, **kwargs).SerializeToString()
         )
+
+
+# temporarily force logging of media handling
+if "RUST_LOG" not in os.environ:
+    os.environ["RUST_LOG"] = "warn,anki::media=debug"
@@ -33,7 +33,10 @@ def Collection(
     "Open a new or existing collection. Path must be unicode."
     assert path.endswith(".anki2")
     (media_dir, media_db) = media_paths_from_col_path(path)
-    backend = RustBackend(path, media_dir, media_db)
+    log_path = ""
+    if not server:
+        log_path = path.replace(".anki2", "2.log")
+    backend = RustBackend(path, media_dir, media_db, log_path)
     path = os.path.abspath(path)
     create = not os.path.exists(path)
     if create:
@@ -23,9 +23,7 @@ serde = "1.0.104"
 serde_json = "1.0.45"
 tokio = "0.2.11"
 serde_derive = "1.0.104"
-env_logger = "0.7.1"
 zip = "0.5.4"
-log = "0.4.8"
 serde_tuple = "0.4.0"
 coarsetime = "=0.1.11"
 utime = "0.2.1"
@@ -34,6 +32,10 @@ unic-langid = { version = "0.8.0", features = ["macros"] }
 fluent = { git = "https://github.com/ankitects/fluent-rs.git", branch="32bit-panic" }
 intl-memoizer = { git = "https://github.com/ankitects/fluent-rs.git", branch="32bit-panic" }
 num-format = "0.4.0"
+slog = { version = "2.5.2", features = ["max_level_trace", "release_max_level_debug"] }
+slog-term = "2.5.0"
+slog-async = "2.4.0"
+slog-envlogger = "2.2.0"

 [target.'cfg(target_vendor="apple")'.dependencies]
 rusqlite = { version = "0.21.0", features = ["trace"] }
@@ -1,12 +1,12 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::backend_proto as pb;
 use crate::backend_proto::backend_input::Value;
 use crate::backend_proto::{Empty, RenderedTemplateReplacement, SyncMediaIn};
 use crate::err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind};
 use crate::i18n::{tr_args, FString, I18n};
 use crate::latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex};
+use crate::log::{default_logger, Logger};
 use crate::media::check::MediaChecker;
 use crate::media::sync::MediaSyncProgress;
 use crate::media::MediaManager;
@@ -17,10 +17,11 @@ use crate::template::{
     RenderedNode,
 };
 use crate::text::{extract_av_tags, strip_av_tags, AVTag};
+use crate::{backend_proto as pb, log};
 use fluent::FluentValue;
 use prost::Message;
 use std::collections::{HashMap, HashSet};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use tokio::runtime::Runtime;

 pub type ProtoProgressCallback = Box<dyn Fn(Vec<u8>) -> bool + Send>;
@@ -32,6 +33,7 @@ pub struct Backend {
     media_db: String,
     progress_callback: Option<ProtoProgressCallback>,
     i18n: I18n,
+    log: Logger,
 }

 enum Progress<'a> {
@@ -101,13 +103,28 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
         Err(_) => return Err("couldn't decode init request".into()),
     };

-    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path);
+    let mut path = input.collection_path.clone();
+    path.push_str(".log");
+
+    let log_path = match input.log_path.as_str() {
+        "" => None,
+        path => Some(Path::new(path)),
+    };
+    let logger =
+        default_logger(log_path).map_err(|e| format!("Unable to open log file: {:?}", e))?;
+
+    let i18n = I18n::new(
+        &input.preferred_langs,
+        input.locale_folder_path,
+        log::terminal(),
+    );

     match Backend::new(
         &input.collection_path,
         &input.media_folder_path,
         &input.media_db_path,
         i18n,
+        logger,
     ) {
         Ok(backend) => Ok(backend),
         Err(e) => Err(format!("{:?}", e)),
@@ -115,13 +132,20 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
 }

 impl Backend {
-    pub fn new(col_path: &str, media_folder: &str, media_db: &str, i18n: I18n) -> Result<Backend> {
+    pub fn new(
+        col_path: &str,
+        media_folder: &str,
+        media_db: &str,
+        i18n: I18n,
+        log: Logger,
+    ) -> Result<Backend> {
         Ok(Backend {
             col_path: col_path.into(),
             media_folder: media_folder.into(),
             media_db: media_db.into(),
             progress_callback: None,
             i18n,
+            log,
         })
     }

@@ -372,7 +396,7 @@ impl Backend {
         };

         let mut rt = Runtime::new().unwrap();
-        rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey))
+        rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey, self.log.clone()))
     }

     fn check_media(&self) -> Result<pb::MediaCheckOut> {
@@ -380,7 +404,7 @@ impl Backend {
             |progress: usize| self.fire_progress_callback(Progress::MediaCheck(progress as u32));

         let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
-        let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n);
+        let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n, &self.log);
         let mut output = checker.check()?;

         let report = checker.summarize_output(&mut output);
@@ -515,7 +539,9 @@ pub fn init_i18n_backend(init_msg: &[u8]) -> Result<I18nBackend> {
         Err(_) => return Err(AnkiError::invalid_input("couldn't decode init msg")),
     };

-    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path);
+    let log = log::terminal();
+
+    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path, log);

     Ok(I18nBackend { i18n })
 }
@@ -2,9 +2,9 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
+use crate::log::{error, Logger};
 use fluent::{FluentArgs, FluentBundle, FluentResource, FluentValue};
 use intl_memoizer::IntlLangMemoizer;
-use log::error;
 use num_format::Locale;
 use std::borrow::Cow;
 use std::fs;
@@ -167,10 +167,11 @@ fn get_bundle(
     text: String,
     extra_text: String,
     locales: &[LanguageIdentifier],
+    log: &Logger,
 ) -> Option<FluentBundle<FluentResource>> {
     let res = FluentResource::try_new(text)
         .map_err(|e| {
-            error!("Unable to parse translations file: {:?}", e);
+            error!(log, "Unable to parse translations file: {:?}", e);
         })
         .ok()?;

@@ -178,14 +179,14 @@ fn get_bundle(
     bundle
         .add_resource(res)
         .map_err(|e| {
-            error!("Duplicate key detected in translation file: {:?}", e);
+            error!(log, "Duplicate key detected in translation file: {:?}", e);
         })
         .ok()?;

     if !extra_text.is_empty() {
         match FluentResource::try_new(extra_text) {
             Ok(res) => bundle.add_resource_overriding(res),
-            Err((_res, e)) => error!("Unable to parse translations file: {:?}", e),
+            Err((_res, e)) => error!(log, "Unable to parse translations file: {:?}", e),
         }
     }

@@ -206,12 +207,13 @@ fn get_bundle_with_extra(
     lang: Option<&LanguageIdentifier>,
     ftl_folder: &Path,
     locales: &[LanguageIdentifier],
+    log: &Logger,
 ) -> Option<FluentBundle<FluentResource>> {
     let extra_text = if let Some(path) = lang_folder(lang, &ftl_folder) {
         match ftl_external_text(&path) {
             Ok(text) => text,
             Err(e) => {
-                error!("Error reading external FTL files: {:?}", e);
+                error!(log, "Error reading external FTL files: {:?}", e);
                 "".into()
             }
         }
@@ -219,16 +221,21 @@ fn get_bundle_with_extra(
         "".into()
     };

-    get_bundle(text, extra_text, locales)
+    get_bundle(text, extra_text, locales, log)
 }

 #[derive(Clone)]
 pub struct I18n {
     inner: Arc<Mutex<I18nInner>>,
+    log: Logger,
 }

 impl I18n {
-    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(locale_codes: &[S], ftl_folder: P) -> Self {
+    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(
+        locale_codes: &[S],
+        ftl_folder: P,
+        log: Logger,
+    ) -> Self {
         let ftl_folder = ftl_folder.into();

         let mut langs = vec![];
@@ -246,10 +253,12 @@ impl I18n {
         for lang in &langs {
             // if the language is bundled in the binary
             if let Some(text) = ftl_localized_text(lang) {
-                if let Some(bundle) = get_bundle_with_extra(text, Some(lang), &ftl_folder, &langs) {
+                if let Some(bundle) =
+                    get_bundle_with_extra(text, Some(lang), &ftl_folder, &langs, &log)
+                {
                     bundles.push(bundle);
                 } else {
-                    error!("Failed to create bundle for {:?}", lang.language())
+                    error!(log, "Failed to create bundle for {:?}", lang.language())
                 }

         // if English was listed, any further preferences are skipped,
@@ -265,11 +274,12 @@ impl I18n {

         // add English templates
         let template_bundle =
-            get_bundle_with_extra(ftl_template_text(), None, &ftl_folder, &langs).unwrap();
+            get_bundle_with_extra(ftl_template_text(), None, &ftl_folder, &langs, &log).unwrap();
         bundles.push(template_bundle);

         Self {
             inner: Arc::new(Mutex::new(I18nInner { bundles })),
+            log,
         }
     }

@@ -302,7 +312,7 @@ impl I18n {
         let mut errs = vec![];
         let out = bundle.format_pattern(pat, args.as_ref(), &mut errs);
         if !errs.is_empty() {
-            error!("Error(s) in translation '{}': {:?}", key, errs);
+            error!(self.log, "Error(s) in translation '{}': {:?}", key, errs);
         }
         // clone so we can discard args
         return out.to_string().into();
@@ -389,6 +399,7 @@ impl NumberFormatter {
 mod test {
     use crate::i18n::NumberFormatter;
     use crate::i18n::{tr_args, I18n};
+    use crate::log;
     use std::path::PathBuf;
     use unic_langid::langid;

@@ -403,8 +414,10 @@ mod test {
         let mut ftl_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
         ftl_dir.push("tests/support/ftl");

+        let log = log::terminal();
+
         // English template
-        let i18n = I18n::new(&["zz"], &ftl_dir);
+        let i18n = I18n::new(&["zz"], &ftl_dir, log.clone());
         assert_eq!(i18n.tr_("valid-key", None), "a valid key");
         assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");

@@ -427,7 +440,7 @@ mod test {
         );

         // Another language
-        let i18n = I18n::new(&["ja_JP"], &ftl_dir);
+        let i18n = I18n::new(&["ja_JP"], &ftl_dir, log.clone());
         assert_eq!(i18n.tr_("valid-key", None), "キー");
         assert_eq!(i18n.tr_("only-in-english", None), "not translated");
         assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
@@ -438,7 +451,7 @@ mod test {
         );

         // Decimal separator
-        let i18n = I18n::new(&["pl-PL"], &ftl_dir);
+        let i18n = I18n::new(&["pl-PL"], &ftl_dir, log.clone());
         // falls back on English, but with Polish separators
         assert_eq!(
             i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>2.07])),
@@ -14,6 +14,7 @@ pub mod cloze;
 pub mod err;
 pub mod i18n;
 pub mod latex;
+pub mod log;
 pub mod media;
 pub mod sched;
 pub mod template;
rslib/src/log.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+pub use slog::{debug, error, Logger};
+use slog::{slog_o, Drain};
+use slog_async::OverflowStrategy;
+use std::fs::OpenOptions;
+use std::io;
+use std::path::Path;
+
+pub(crate) fn terminal() -> Logger {
+    let decorator = slog_term::TermDecorator::new().build();
+    let drain = slog_term::FullFormat::new(decorator).build().fuse();
+    let drain = slog_envlogger::new(drain);
+    let drain = slog_async::Async::new(drain)
+        .chan_size(1_024)
+        .overflow_strategy(OverflowStrategy::Block)
+        .build()
+        .fuse();
+    Logger::root(drain, slog_o!())
+}
+
+fn file(path: &Path) -> io::Result<Logger> {
+    let file = OpenOptions::new().create(true).append(true).open(path)?;
+
+    let decorator = slog_term::PlainSyncDecorator::new(file);
+    let drain = slog_term::FullFormat::new(decorator).build().fuse();
+    let drain = slog_envlogger::new(drain);
+    let drain = slog_async::Async::new(drain)
+        .chan_size(1_024)
+        .overflow_strategy(OverflowStrategy::Block)
+        .build()
+        .fuse();
+    Ok(Logger::root(drain, slog_o!()))
+}
+
+/// Get a logger, logging to a file if a path was provided, otherwise terminal.
+pub(crate) fn default_logger(path: Option<&Path>) -> io::Result<Logger> {
+    Ok(match path {
+        Some(path) => file(path)?,
+        None => terminal(),
+    })
+}
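For orientation, a hedged sketch of how this module is consumed from inside the crate (the enclosing helper here is hypothetical; the real call sites are init_backend above and the media code below):

use crate::log::{debug, default_logger, Logger};
use std::path::Path;

// Hypothetical wrapper: file-backed logger when a path is given, terminal
// otherwise, matching how init_backend() maps an empty log_path to None.
fn open_logger(log_path: &str) -> std::io::Result<Logger> {
    let path = match log_path {
        "" => None,
        p => Some(Path::new(p)),
    };
    let log = default_logger(path)?;
    debug!(log, "logger ready"; "to_file" => log_path != "");
    Ok(log)
}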
@@ -2,6 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::{AnkiError, Result};
+use crate::log::{debug, Logger};
 use crate::media::database::{MediaDatabaseContext, MediaEntry};
 use crate::media::files::{
     filename_if_normalized, mtime_as_i64, sha1_of_file, MEDIA_SYNC_FILESIZE_LIMIT,
@@ -25,17 +26,23 @@ where
     media_folder: &'a Path,
     progress_cb: F,
     checked: usize,
+    log: &'a Logger,
 }

 impl<F> ChangeTracker<'_, F>
 where
     F: FnMut(usize) -> bool,
 {
-    pub(super) fn new(media_folder: &Path, progress: F) -> ChangeTracker<F> {
+    pub(super) fn new<'a>(
+        media_folder: &'a Path,
+        progress: F,
+        log: &'a Logger,
+    ) -> ChangeTracker<'a, F> {
         ChangeTracker {
             media_folder,
             progress_cb: progress,
             checked: 0,
+            log,
         }
     }

@@ -53,7 +60,9 @@ where
         let dirmod = mtime_as_i64(self.media_folder)?;

         let mut meta = ctx.get_meta()?;
+        debug!(self.log, "begin change check"; "folder_mod" => dirmod, "db_mod" => meta.folder_mtime);
         if dirmod == meta.folder_mtime {
+            debug!(self.log, "skip check");
             return Ok(());
         } else {
             meta.folder_mtime = dirmod;
@@ -106,6 +115,7 @@ where
                 Some(fname) => fname,
                 None => {
                     // not normalized; skip it
+                    debug!(self.log, "ignore non-normalized"; "fname"=>disk_fname);
                     continue;
                 }
             };
@@ -135,6 +145,7 @@ where
                 .as_secs() as i64;
             if let Some(previous_mtime) = previous_mtime {
                 if previous_mtime == mtime {
+                    debug!(self.log, "mtime unchanged"; "fname"=>fname.as_ref());
                     continue;
                 }
             }
@@ -147,6 +158,10 @@ where
                 mtime,
                 is_new: previous_mtime.is_none(),
             });
+            debug!(self.log, "added or changed";
+                "fname"=>fname.as_ref(),
+                "mtime"=>mtime,
+                "sha1"=>sha1.as_ref().map(|s| hex::encode(&s[0..4])));

             self.checked += 1;
             if self.checked % 10 == 0 {
@@ -156,6 +171,9 @@ where

         // any remaining entries from the database have been deleted
         let removed: Vec<_> = mtimes.into_iter().map(|(k, _)| k).collect();
+        for f in &removed {
+            debug!(self.log, "db entry missing on disk"; "fname"=>f);
+        }

         Ok((added_or_changed, removed))
     }
@@ -264,7 +282,9 @@ mod test {

         let progress_cb = |_n| true;

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        let log = crate::log::terminal();
+
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         let mut entry = ctx.transact(|ctx| {
             assert_eq!(ctx.count()?, 1);
@@ -299,7 +319,7 @@ mod test {
             Ok(entry)
         })?;

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         ctx.transact(|ctx| {
             assert_eq!(ctx.count()?, 1);
@@ -332,7 +352,7 @@ mod test {

         change_mtime(&media_dir);

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         assert_eq!(ctx.count()?, 0);
         assert!(!ctx.get_pending_uploads(1)?.is_empty());
@@ -4,6 +4,7 @@
 use crate::err::{AnkiError, Result};
 use crate::i18n::{tr_args, tr_strs, FString, I18n};
 use crate::latex::extract_latex_expanding_clozes;
+use crate::log::{debug, Logger};
 use crate::media::col::{
     for_every_note, get_note_types, mark_collection_modified, open_or_create_collection_db,
     set_note, Note,
@@ -15,7 +16,6 @@ use crate::media::files::{
 use crate::text::{normalize_to_nfc, MediaRef};
 use crate::{media::MediaManager, text::extract_media_refs};
 use coarsetime::Instant;
-use log::debug;
 use std::collections::{HashMap, HashSet};
 use std::path::Path;
 use std::{borrow::Cow, fs, time};
@@ -47,6 +47,7 @@ where
     checked: usize,
     progress_updated: Instant,
     i18n: &'a I18n,
+    log: &'a Logger,
 }

 impl<P> MediaChecker<'_, P>
@@ -58,6 +59,7 @@ where
         col_path: &'a Path,
         progress_cb: P,
         i18n: &'a I18n,
+        log: &'a Logger,
     ) -> MediaChecker<'a, P> {
         MediaChecker {
             mgr,
@@ -66,6 +68,7 @@ where
             checked: 0,
             progress_updated: Instant::now(),
             i18n,
+            log,
         }
     }

@@ -258,7 +261,7 @@ where
             }
         })?;
         let fname = self.mgr.add_file(ctx, disk_fname, &data)?;
-        debug!("renamed {} to {}", disk_fname, fname);
+        debug!(self.log, "renamed"; "from"=>disk_fname, "to"=>&fname.as_ref());
         assert_ne!(fname.as_ref(), disk_fname);

         // move the originally named file to the trash
@@ -303,6 +306,7 @@ where
             .unwrap_or(0);
         if elap_secs >= 7 * 86_400 {
             debug!(
+                self.log,
                 "removing {:?} from trash, as 7 days have elapsed",
                 dentry.path()
             );
@@ -439,13 +443,15 @@ fn extract_latex_refs(note: &Note, seen_files: &mut HashSet<String>, svg: bool)
 mod test {
     use crate::err::Result;
     use crate::i18n::I18n;
+    use crate::log;
+    use crate::log::Logger;
     use crate::media::check::{MediaCheckOutput, MediaChecker};
     use crate::media::MediaManager;
     use std::fs;
     use std::path::PathBuf;
     use tempfile::{tempdir, TempDir};

-    fn common_setup() -> Result<(TempDir, MediaManager, PathBuf)> {
+    fn common_setup() -> Result<(TempDir, MediaManager, PathBuf, Logger)> {
         let dir = tempdir()?;
         let media_dir = dir.path().join("media");
         fs::create_dir(&media_dir)?;
@@ -458,12 +464,13 @@ mod test {

         let mgr = MediaManager::new(&media_dir, media_db)?;

-        Ok((dir, mgr, col_path))
+        let log = log::terminal();
+        Ok((dir, mgr, col_path, log))
     }

     #[test]
     fn media_check() -> Result<()> {
-        let (_dir, mgr, col_path) = common_setup()?;
+        let (_dir, mgr, col_path, log) = common_setup()?;

         // add some test files
         fs::write(&mgr.media_folder.join("zerobytes"), "")?;
@@ -473,10 +480,10 @@ mod test {
         fs::write(&mgr.media_folder.join("_under.jpg"), "foo")?;
         fs::write(&mgr.media_folder.join("unused.jpg"), "foo")?;

-        let i18n = I18n::new(&["zz"], "dummy");
+        let i18n = I18n::new(&["zz"], "dummy", log.clone());

         let progress = |_n| true;
-        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n);
+        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
         let mut output = checker.check()?;

         assert_eq!(
@@ -522,14 +529,14 @@ Unused: unused.jpg

     #[test]
     fn unicode_normalization() -> Result<()> {
-        let (_dir, mgr, col_path) = common_setup()?;
+        let (_dir, mgr, col_path, log) = common_setup()?;

-        let i18n = I18n::new(&["zz"], "dummy");
+        let i18n = I18n::new(&["zz"], "dummy", log.clone());

         fs::write(&mgr.media_folder.join("ぱぱ.jpg"), "nfd encoding")?;

         let progress = |_n| true;
-        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n);
+        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
         let mut output = checker.check()?;
         output.missing.sort();
@@ -2,13 +2,12 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
-use log::debug;
 use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
 use std::collections::HashMap;
 use std::path::Path;

 fn trace(s: &str) {
-    debug!("sql: {}", s)
+    println!("sql: {}", s)
 }

 pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
+use crate::log::{debug, Logger};
 use lazy_static::lazy_static;
-use log::debug;
 use regex::Regex;
 use sha1::Sha1;
 use std::borrow::Cow;
@@ -364,6 +364,7 @@ pub(super) fn add_file_from_ankiweb(
     media_folder: &Path,
     fname: &str,
     data: &[u8],
+    log: &Logger,
 ) -> Result<AddedFile> {
     let sha1 = sha1_of_data(data);
     let normalized = normalize_filename(fname);
@@ -374,7 +375,7 @@ pub(super) fn add_file_from_ankiweb(
         fs::write(&path, data)?;
         (None, path)
     } else {
-        debug!("non-normalized filename received {}", fname);
+        debug!(log, "non-normalized filename received"; "fname"=>&fname);
         // ankiweb sent us a non-normalized filename, so we'll rename it
         let new_name = add_data_to_folder_uniquely(media_folder, fname, data, sha1)?;
         (
@@ -6,6 +6,7 @@ use crate::media::database::{open_or_create, MediaDatabaseContext, MediaEntry};
 use crate::media::files::{add_data_to_folder_uniquely, mtime_as_i64, remove_files, sha1_of_data};
 use crate::media::sync::{MediaSyncProgress, MediaSyncer};
 use rusqlite::Connection;
+use slog::Logger;
 use std::borrow::Cow;
 use std::path::{Path, PathBuf};
@@ -11,10 +11,10 @@ use crate::media::MediaManager;
 use crate::version;
 use bytes::Bytes;
 use coarsetime::Instant;
-use log::debug;
 use reqwest::{multipart, Client, Response};
 use serde_derive::{Deserialize, Serialize};
 use serde_tuple::Serialize_tuple;
+use slog::{debug, Logger};
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::io::{Read, Write};
@@ -46,6 +46,7 @@ where
     progress: MediaSyncProgress,
     progress_updated: Instant,
     endpoint: &'a str,
+    log: Logger,
 }

 #[derive(Debug, Deserialize)]
@@ -135,7 +136,12 @@ impl<P> MediaSyncer<'_, P>
 where
     P: Fn(&MediaSyncProgress) -> bool,
 {
-    pub fn new<'a>(mgr: &'a MediaManager, progress_cb: P, endpoint: &'a str) -> MediaSyncer<'a, P> {
+    pub fn new<'a>(
+        mgr: &'a MediaManager,
+        progress_cb: P,
+        endpoint: &'a str,
+        log: Logger,
+    ) -> MediaSyncer<'a, P> {
         let client = Client::builder()
             .connect_timeout(time::Duration::from_secs(30))
             .build()
@@ -151,6 +157,7 @@ where
             progress: Default::default(),
             progress_updated: Instant::now(),
             endpoint,
+            log,
         }
     }

@@ -165,16 +172,19 @@ where
         let meta = self.ctx.get_meta()?;
         let client_usn = meta.last_sync_usn;

-        debug!("beginning media sync");
+        debug!(self.log, "begin media sync");
         let (sync_key, server_usn) = self.sync_begin(hkey).await?;
         self.skey = Some(sync_key);
-        debug!("server usn was {}", server_usn);
+        debug!(self.log, "server usn was {}", server_usn);

         let mut actions_performed = false;

         // need to fetch changes from server?
         if client_usn != server_usn {
-            debug!("differs from local usn {}, fetching changes", client_usn);
+            debug!(
+                self.log,
+                "differs from local usn {}, fetching changes", client_usn
+            );
             self.fetch_changes(meta).await?;
             actions_performed = true;
         }
@@ -192,7 +202,7 @@ where

         self.fire_progress_cb()?;

-        debug!("media sync complete");
+        debug!(self.log, "media sync complete");

         Ok(())
     }
@@ -215,7 +225,7 @@ where
             }
         };

-        ChangeTracker::new(self.mgr.media_folder.as_path(), progress)
+        ChangeTracker::new(self.mgr.media_folder.as_path(), progress, &self.log)
             .register_changes(&mut self.ctx)
     }

@@ -242,11 +252,15 @@ where
     async fn fetch_changes(&mut self, mut meta: MediaDatabaseMetadata) -> Result<()> {
         let mut last_usn = meta.last_sync_usn;
         loop {
-            debug!("fetching record batch starting from usn {}", last_usn);
+            debug!(
+                self.log,
+                "fetching record batch";
+                "start_usn"=>last_usn
+            );

             let batch = self.fetch_record_batch(last_usn).await?;
             if batch.is_empty() {
-                debug!("empty batch, done");
+                debug!(self.log, "empty batch, done");
                 break;
             }
             last_usn = batch.last().unwrap().usn;
@@ -255,7 +269,7 @@ where
             self.maybe_fire_progress_cb()?;

             let (to_download, to_delete, to_remove_pending) =
-                determine_required_changes(&mut self.ctx, &batch)?;
+                determine_required_changes(&mut self.ctx, &batch, &self.log)?;

             // file removal
             self.mgr.remove_files(&mut self.ctx, to_delete.as_slice())?;
@@ -272,9 +286,12 @@ where
                     .map(ToOwned::to_owned)
                     .collect();
                 let zip_data = self.fetch_zip(batch.as_slice()).await?;
-                let download_batch =
-                    extract_into_media_folder(self.mgr.media_folder.as_path(), zip_data)?
-                        .into_iter();
+                let download_batch = extract_into_media_folder(
+                    self.mgr.media_folder.as_path(),
+                    zip_data,
+                    &self.log,
+                )?
+                .into_iter();
                 let len = download_batch.len();
                 dl_fnames = &dl_fnames[len..];
                 downloaded.extend(download_batch);
@@ -285,10 +302,11 @@ where

             // then update the DB
             let dirmod = mtime_as_i64(&self.mgr.media_folder)?;
+            let log = &self.log;
             self.ctx.transact(|ctx| {
-                record_clean(ctx, &to_remove_pending)?;
-                record_removals(ctx, &to_delete)?;
-                record_additions(ctx, downloaded)?;
+                record_clean(ctx, &to_remove_pending, log)?;
+                record_removals(ctx, &to_delete, log)?;
+                record_additions(ctx, downloaded, log)?;

                 // update usn
                 meta.last_sync_usn = last_usn;
@@ -308,7 +326,7 @@ where
                 break;
             }

-            let zip_data = zip_files(&mut self.ctx, &self.mgr.media_folder, &pending)?;
+            let zip_data = zip_files(&mut self.ctx, &self.mgr.media_folder, &pending, &self.log)?;
             if zip_data.is_none() {
                 self.progress.checked += pending.len();
                 self.maybe_fire_progress_cb()?;
@@ -334,14 +352,16 @@ where
             .map(|e| &e.fname)
             .collect();
         let fname_cnt = fnames.len() as i32;
+        let log = &self.log;
         self.ctx.transact(|ctx| {
-            record_clean(ctx, fnames.as_slice())?;
+            record_clean(ctx, fnames.as_slice(), log)?;
             let mut meta = ctx.get_meta()?;
             if meta.last_sync_usn + fname_cnt == reply.current_usn {
                 meta.last_sync_usn = reply.current_usn;
                 ctx.set_meta(&meta)?;
             } else {
                 debug!(
+                    log,
                     "server usn {} is not {}, skipping usn update",
                     reply.current_usn,
                     meta.last_sync_usn + fname_cnt
@@ -411,7 +431,7 @@ where
     async fn fetch_zip(&self, files: &[&String]) -> Result<Bytes> {
         let url = format!("{}downloadFiles", self.endpoint);

-        debug!("requesting files: {:?}", files);
+        debug!(self.log, "requesting files: {:?}", files);

         let req = ZipRequest { files };
         let resp = ankiweb_json_request(&self.client, &url, &req, self.skey()).await?;
@@ -470,6 +490,7 @@ fn determine_required_change(
 fn determine_required_changes<'a>(
     ctx: &mut MediaDatabaseContext,
     records: &'a [ServerMediaRecord],
+    log: &Logger,
 ) -> Result<(Vec<&'a String>, Vec<&'a String>, Vec<&'a String>)> {
     let mut to_download = vec![];
     let mut to_delete = vec![];
@@ -493,12 +514,13 @@ fn determine_required_changes<'a>(

         let req_change = determine_required_change(&local_sha1, &remote.sha1, local_state);
         debug!(
-            "for {}, lsha={} rsha={} lstate={:?} -> {:?}",
-            remote.fname,
-            local_sha1.chars().take(8).collect::<String>(),
-            remote.sha1.chars().take(8).collect::<String>(),
-            local_state,
-            req_change
+            log,
+            "determine action";
+            "fname" => &remote.fname,
+            "lsha" => local_sha1.chars().take(8).collect::<String>(),
+            "rsha" => remote.sha1.chars().take(8).collect::<String>(),
+            "state" => ?local_state,
+            "action" => ?req_change
         );
         match req_change {
             RequiredChange::Download => to_download.push(&remote.fname),
@@ -556,7 +578,11 @@ async fn ankiweb_request(
         .map_err(Into::into)
 }

-fn extract_into_media_folder(media_folder: &Path, zip: Bytes) -> Result<Vec<AddedFile>> {
+fn extract_into_media_folder(
+    media_folder: &Path,
+    zip: Bytes,
+    log: &Logger,
+) -> Result<Vec<AddedFile>> {
     let reader = io::Cursor::new(zip);
     let mut zip = zip::ZipArchive::new(reader)?;

@@ -578,9 +604,9 @@ fn extract_into_media_folder(
         let mut data = Vec::with_capacity(file.size() as usize);
         file.read_to_end(&mut data)?;

-        debug!("writing {}", real_name);
+        debug!(log, "write"; "fname" => real_name);

-        let added = add_file_from_ankiweb(media_folder, real_name, &data)?;
+        let added = add_file_from_ankiweb(media_folder, real_name, &data, log)?;

         output.push(added);
     }
@@ -588,21 +614,29 @@ fn extract_into_media_folder(
     Ok(output)
 }

-fn record_removals(ctx: &mut MediaDatabaseContext, removals: &[&String]) -> Result<()> {
+fn record_removals(
+    ctx: &mut MediaDatabaseContext,
+    removals: &[&String],
+    log: &Logger,
+) -> Result<()> {
     for &fname in removals {
-        debug!("marking removed: {}", fname);
+        debug!(log, "mark removed"; "fname" => fname);
        ctx.remove_entry(fname)?;
     }

     Ok(())
 }

-fn record_additions(ctx: &mut MediaDatabaseContext, additions: Vec<AddedFile>) -> Result<()> {
+fn record_additions(
+    ctx: &mut MediaDatabaseContext,
+    additions: Vec<AddedFile>,
+    log: &Logger,
+) -> Result<()> {
     for file in additions {
         if let Some(renamed) = file.renamed_from {
             // the file AnkiWeb sent us wasn't normalized, so we need to record
             // the old file name as a deletion
-            debug!("marking non-normalized file as deleted: {}", renamed);
+            debug!(log, "marking non-normalized file as deleted: {}", renamed);
             let mut entry = MediaEntry {
                 fname: renamed,
                 sha1: None,
@@ -611,7 +645,10 @@ fn record_additions(
             };
             ctx.set_entry(&entry)?;
             // and upload the new filename to ankiweb
-            debug!("marking renamed file as needing upload: {}", file.fname);
+            debug!(
+                log,
+                "marking renamed file as needing upload: {}", file.fname
+            );
             entry = MediaEntry {
                 fname: file.fname.to_string(),
                 sha1: Some(file.sha1),
@@ -627,10 +664,9 @@ fn record_additions(
                 mtime: file.mtime,
                 sync_required: false,
             };
-            debug!(
-                "marking added: {} {}",
-                entry.fname,
-                hex::encode(entry.sha1.as_ref().unwrap())
-            );
+            debug!(log, "mark added";
+                "fname" => &entry.fname,
+                "sha1" => hex::encode(&entry.sha1.as_ref().unwrap()[0..4])
+            );
             ctx.set_entry(&entry)?;
         }
@@ -639,12 +675,12 @@ fn record_additions(
     Ok(())
 }

-fn record_clean(ctx: &mut MediaDatabaseContext, clean: &[&String]) -> Result<()> {
+fn record_clean(ctx: &mut MediaDatabaseContext, clean: &[&String], log: &Logger) -> Result<()> {
     for &fname in clean {
         if let Some(mut entry) = ctx.get_entry(fname)? {
             if entry.sync_required {
                 entry.sync_required = false;
-                debug!("marking clean: {}", entry.fname);
+                debug!(log, "mark clean"; "fname"=>&entry.fname);
                 ctx.set_entry(&entry)?;
             }
         }
@@ -657,6 +693,7 @@ fn zip_files<'a>(
     ctx: &mut MediaDatabaseContext,
     media_folder: &Path,
     files: &'a [MediaEntry],
+    log: &Logger,
 ) -> Result<Option<Vec<u8>>> {
     let buf = vec![];
     let mut invalid_entries = vec![];
@@ -678,7 +715,7 @@ fn zip_files<'a>(
         let file_data = match data_for_file(media_folder, &file.fname) {
             Ok(data) => data,
             Err(e) => {
-                debug!("error accessing {}: {}", &file.fname, e);
+                debug!(log, "error accessing {}: {}", &file.fname, e);
                 invalid_entries.push(&file.fname);
                 continue;
             }
@@ -687,7 +724,7 @@ fn zip_files<'a>(
         if let Some(data) = &file_data {
             let normalized = normalize_filename(&file.fname);
             if let Cow::Owned(o) = normalized {
-                debug!("media check required: {} should be {}", &file.fname, o);
+                debug!(log, "media check required: {} should be {}", &file.fname, o);
                 invalid_entries.push(&file.fname);
                 continue;
             }
@@ -706,9 +743,10 @@ fn zip_files<'a>(
         }

         debug!(
-            "will upload {} as {}",
-            file.fname,
-            if file_data.is_some() {
+            log,
+            "will upload";
+            "fname"=>&file.fname,
+            "kind"=>if file_data.is_some() {
                 "addition "
             } else {
                 "removal"
@@ -772,8 +810,10 @@ mod test {
             true
         };

+        let log = crate::log::terminal();
+
         let mgr = MediaManager::new(&media_dir, &media_db)?;
-        mgr.sync_media(progress, "https://sync.ankiweb.net/msync/", hkey)
+        mgr.sync_media(progress, "https://sync.ankiweb.net/msync/", hkey, log)
             .await?;

         Ok(())
@@ -781,8 +821,6 @@ mod test {

     #[test]
     fn sync() {
-        env_logger::init();
-
         let hkey = match std::env::var("TEST_HKEY") {
             Ok(s) => s,
             Err(_) => {
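One slog idiom used in the hunks above is worth calling out: a leading `?` in a key/value pair logs the value's Debug representation (as in `"state" => ?local_state` and `"action" => ?req_change`). A tiny self-contained illustration, with names made up for the example:

use slog::{debug, o, Discard, Logger};

#[derive(Debug)]
enum RequiredChange {
    Download,
}

fn main() {
    // Discard drops all records; it just makes the example runnable.
    let log = Logger::root(Discard, o!());
    let action = RequiredChange::Download;
    // `?action` serializes the value with {:?}, i.e. its Debug impl.
    debug!(log, "determine action"; "action" => ?action);
}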
@@ -176,13 +176,15 @@ impl Timespan {
 #[cfg(test)]
 mod test {
     use crate::i18n::I18n;
+    use crate::log;
     use crate::sched::timespan::{
         answer_button_time, learning_congrats, studied_today, time_span, MONTH,
     };

     #[test]
     fn answer_buttons() {
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(answer_button_time(30.0, &i18n), "30s");
         assert_eq!(answer_button_time(70.0, &i18n), "1.2m");
         assert_eq!(answer_button_time(1.1 * MONTH, &i18n), "1.1mo");
@@ -190,7 +192,8 @@ mod test {

     #[test]
     fn time_spans() {
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(time_span(1.0, &i18n, false), "1 second");
         assert_eq!(time_span(30.3, &i18n, false), "30 seconds");
         assert_eq!(time_span(30.3, &i18n, true), "30.3 seconds");
@@ -202,7 +205,8 @@ mod test {
     #[test]
     fn combo() {
         // temporary test of fluent term handling
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(
             &studied_today(3, 13.0, &i18n).replace("\n", " "),
             "Studied 3 cards in 13 seconds today (4.33s/card)"
@@ -6,8 +6,6 @@ authors = ["Ankitects Pty Ltd and contributors"]

 [dependencies]
 anki = { path = "../rslib" }
-log = "0.4.8"
-env_logger = "0.7.1"
 tokio = "0.2.11"

 [dependencies.pyo3]
@@ -4,7 +4,6 @@
 use anki::backend::{
     init_backend, init_i18n_backend, Backend as RustBackend, I18nBackend as RustI18nBackend,
 };
-use log::error;
 use pyo3::prelude::*;
 use pyo3::types::PyBytes;
 use pyo3::{exceptions, wrap_pyfunction};
@@ -55,7 +54,7 @@ impl Backend {
         let res: PyObject = match callback.call1(py, (out_obj,)) {
             Ok(res) => res,
             Err(e) => {
-                error!("error calling callback:");
+                println!("error calling callback:");
                 e.print(py);
                 return false;
             }
@@ -63,7 +62,7 @@ impl Backend {
         match res.extract(py) {
             Ok(cont) => cont,
             Err(e) => {
-                error!("callback did not return bool: {:?}", e);
+                println!("callback did not return bool: {:?}", e);
                 return false;
             }
         }
@@ -107,7 +106,5 @@ fn ankirspy(_py: Python, m: &PyModule) -> PyResult<()> {
     m.add_wrapped(wrap_pyfunction!(open_backend)).unwrap();
     m.add_wrapped(wrap_pyfunction!(open_i18n)).unwrap();

-    env_logger::init();
-
     Ok(())
 }