make logging explicit, and support logging to a file
commit ba4fe71eba (parent 7482904735)
17 changed files with 260 additions and 100 deletions
rslib/backend.proto
@@ -12,6 +12,7 @@ message BackendInit {
     string media_db_path = 3;
     repeated string preferred_langs = 4;
     string locale_folder_path = 5;
+    string log_path = 6;
 }

 message I18nBackendInit {
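The new log_path field travels in the existing BackendInit message, so no new FFI entry point is needed. An empty string means "no log file": init_backend below maps it to None, and default_logger() then falls back to terminal logging.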
pylib/anki/rsbackend.py
@@ -179,7 +179,7 @@ def proto_progress_to_native(progress: pb.Progress) -> Progress:

 class RustBackend:
     def __init__(
-        self, col_path: str, media_folder_path: str, media_db_path: str
+        self, col_path: str, media_folder_path: str, media_db_path: str, log_path: str
     ) -> None:
         ftl_folder = os.path.join(anki.lang.locale_folder, "fluent")
         init_msg = pb.BackendInit(
@@ -188,6 +188,7 @@ class RustBackend:
             media_db_path=media_db_path,
             locale_folder_path=ftl_folder,
             preferred_langs=[anki.lang.currentLang],
+            log_path=log_path,
         )
         self._backend = ankirspy.open_backend(init_msg.SerializeToString())
         self._backend.set_progress_callback(self._on_progress)
@@ -382,3 +383,8 @@ class I18nBackend:
         return self._backend.translate(
             translate_string_in(key, **kwargs).SerializeToString()
         )
+
+
+# temporarily force logging of media handling
+if "RUST_LOG" not in os.environ:
+    os.environ["RUST_LOG"] = "warn,anki::media=debug"
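slog-envlogger accepts the same RUST_LOG directive syntax as env_logger, so "warn,anki::media=debug" means: warn and above everywhere, but debug and above for the anki::media module. Because the variable is only set when absent, users can still override the filter by exporting RUST_LOG before starting Anki.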
pylib/anki/storage.py
@@ -33,7 +33,10 @@ def Collection(
     "Open a new or existing collection. Path must be unicode."
     assert path.endswith(".anki2")
     (media_dir, media_db) = media_paths_from_col_path(path)
-    backend = RustBackend(path, media_dir, media_db)
+    log_path = ""
+    if not server:
+        log_path = path.replace(".anki2", "2.log")
+    backend = RustBackend(path, media_dir, media_db, log_path)
     path = os.path.abspath(path)
     create = not os.path.exists(path)
     if create:
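With this scheme a collection at .../collection.anki2 logs to .../collection2.log next to it; the "2.log" suffix presumably avoids colliding with the collection.log name the Python side already uses for its own optional debug log. Sync servers pass an empty path and keep logging to the terminal.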
rslib/Cargo.toml
@@ -23,9 +23,7 @@ serde = "1.0.104"
 serde_json = "1.0.45"
 tokio = "0.2.11"
 serde_derive = "1.0.104"
-env_logger = "0.7.1"
 zip = "0.5.4"
-log = "0.4.8"
 serde_tuple = "0.4.0"
 coarsetime = "=0.1.11"
 utime = "0.2.1"
@@ -34,6 +32,10 @@ unic-langid = { version = "0.8.0", features = ["macros"] }
 fluent = { git = "https://github.com/ankitects/fluent-rs.git", branch="32bit-panic" }
 intl-memoizer = { git = "https://github.com/ankitects/fluent-rs.git", branch="32bit-panic" }
 num-format = "0.4.0"
+slog = { version = "2.5.2", features = ["max_level_trace", "release_max_level_debug"] }
+slog-term = "2.5.0"
+slog-async = "2.4.0"
+slog-envlogger = "2.2.0"

 [target.'cfg(target_vendor="apple")'.dependencies]
 rusqlite = { version = "0.21.0", features = ["trace"] }
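The max_level_trace and release_max_level_debug features set slog's compile-time level ceilings: debug builds keep everything down to trace, release builds down to debug, and statements below the ceiling compile to no-ops. Runtime filtering on top of that is handled by slog-envlogger via RUST_LOG.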
rslib/src/backend.rs
@@ -1,12 +1,12 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::backend_proto as pb;
 use crate::backend_proto::backend_input::Value;
 use crate::backend_proto::{Empty, RenderedTemplateReplacement, SyncMediaIn};
 use crate::err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind};
 use crate::i18n::{tr_args, FString, I18n};
 use crate::latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex};
+use crate::log::{default_logger, Logger};
 use crate::media::check::MediaChecker;
 use crate::media::sync::MediaSyncProgress;
 use crate::media::MediaManager;
@@ -17,10 +17,11 @@ use crate::template::{
     RenderedNode,
 };
 use crate::text::{extract_av_tags, strip_av_tags, AVTag};
+use crate::{backend_proto as pb, log};
 use fluent::FluentValue;
 use prost::Message;
 use std::collections::{HashMap, HashSet};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use tokio::runtime::Runtime;

 pub type ProtoProgressCallback = Box<dyn Fn(Vec<u8>) -> bool + Send>;
@@ -32,6 +33,7 @@ pub struct Backend {
     media_db: String,
     progress_callback: Option<ProtoProgressCallback>,
     i18n: I18n,
+    log: Logger,
 }

 enum Progress<'a> {
@@ -101,13 +103,28 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
         Err(_) => return Err("couldn't decode init request".into()),
     };

-    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path);
-    let mut path = input.collection_path.clone();
-    path.push_str(".log");
+    let log_path = match input.log_path.as_str() {
+        "" => None,
+        path => Some(Path::new(path)),
+    };
+    let logger =
+        default_logger(log_path).map_err(|e| format!("Unable to open log file: {:?}", e))?;
+
+    let i18n = I18n::new(
+        &input.preferred_langs,
+        input.locale_folder_path,
+        log::terminal(),
+    );

     match Backend::new(
         &input.collection_path,
         &input.media_folder_path,
         &input.media_db_path,
         i18n,
+        logger,
     ) {
         Ok(backend) => Ok(backend),
         Err(e) => Err(format!("{:?}", e)),
@@ -115,13 +132,20 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
 }

 impl Backend {
-    pub fn new(col_path: &str, media_folder: &str, media_db: &str, i18n: I18n) -> Result<Backend> {
+    pub fn new(
+        col_path: &str,
+        media_folder: &str,
+        media_db: &str,
+        i18n: I18n,
+        log: Logger,
+    ) -> Result<Backend> {
         Ok(Backend {
             col_path: col_path.into(),
             media_folder: media_folder.into(),
             media_db: media_db.into(),
             progress_callback: None,
             i18n,
+            log,
         })
     }
@@ -372,7 +396,7 @@ impl Backend {
         };

         let mut rt = Runtime::new().unwrap();
-        rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey))
+        rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey, self.log.clone()))
     }

     fn check_media(&self) -> Result<pb::MediaCheckOut> {
@@ -380,7 +404,7 @@ impl Backend {
             |progress: usize| self.fire_progress_callback(Progress::MediaCheck(progress as u32));

         let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
-        let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n);
+        let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n, &self.log);
         let mut output = checker.check()?;

         let report = checker.summarize_output(&mut output);
@@ -515,7 +539,9 @@ pub fn init_i18n_backend(init_msg: &[u8]) -> Result<I18nBackend> {
         Err(_) => return Err(AnkiError::invalid_input("couldn't decode init msg")),
     };

-    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path);
+    let log = log::terminal();
+
+    let i18n = I18n::new(&input.preferred_langs, input.locale_folder_path, log);

     Ok(I18nBackend { i18n })
 }
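Backend now owns a Logger and hands out copies as needed: slog::Logger is a cheaply clonable, Arc-backed handle, so self.log.clone() can be moved into the media sync while check_media merely borrows it. Both i18n constructors receive log::terminal() rather than the file logger, so translation errors stay visible on the console even when collection logging goes to a file.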
rslib/src/i18n/mod.rs
@@ -2,9 +2,9 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
+use crate::log::{error, Logger};
 use fluent::{FluentArgs, FluentBundle, FluentResource, FluentValue};
 use intl_memoizer::IntlLangMemoizer;
-use log::error;
 use num_format::Locale;
 use std::borrow::Cow;
 use std::fs;
@@ -167,10 +167,11 @@ fn get_bundle(
     text: String,
     extra_text: String,
     locales: &[LanguageIdentifier],
+    log: &Logger,
 ) -> Option<FluentBundle<FluentResource>> {
     let res = FluentResource::try_new(text)
         .map_err(|e| {
-            error!("Unable to parse translations file: {:?}", e);
+            error!(log, "Unable to parse translations file: {:?}", e);
         })
         .ok()?;

@@ -178,14 +179,14 @@ fn get_bundle(
     bundle
         .add_resource(res)
         .map_err(|e| {
-            error!("Duplicate key detected in translation file: {:?}", e);
+            error!(log, "Duplicate key detected in translation file: {:?}", e);
         })
         .ok()?;

     if !extra_text.is_empty() {
         match FluentResource::try_new(extra_text) {
             Ok(res) => bundle.add_resource_overriding(res),
-            Err((_res, e)) => error!("Unable to parse translations file: {:?}", e),
+            Err((_res, e)) => error!(log, "Unable to parse translations file: {:?}", e),
         }
     }
@@ -206,12 +207,13 @@ fn get_bundle_with_extra(
     lang: Option<&LanguageIdentifier>,
     ftl_folder: &Path,
     locales: &[LanguageIdentifier],
+    log: &Logger,
 ) -> Option<FluentBundle<FluentResource>> {
     let extra_text = if let Some(path) = lang_folder(lang, &ftl_folder) {
         match ftl_external_text(&path) {
             Ok(text) => text,
             Err(e) => {
-                error!("Error reading external FTL files: {:?}", e);
+                error!(log, "Error reading external FTL files: {:?}", e);
                 "".into()
             }
         }
@@ -219,16 +221,21 @@ fn get_bundle_with_extra(
         "".into()
     };

-    get_bundle(text, extra_text, locales)
+    get_bundle(text, extra_text, locales, log)
 }

 #[derive(Clone)]
 pub struct I18n {
     inner: Arc<Mutex<I18nInner>>,
+    log: Logger,
 }

 impl I18n {
-    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(locale_codes: &[S], ftl_folder: P) -> Self {
+    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(
+        locale_codes: &[S],
+        ftl_folder: P,
+        log: Logger,
+    ) -> Self {
         let ftl_folder = ftl_folder.into();

         let mut langs = vec![];
@@ -246,10 +253,12 @@ impl I18n {
         for lang in &langs {
             // if the language is bundled in the binary
             if let Some(text) = ftl_localized_text(lang) {
-                if let Some(bundle) = get_bundle_with_extra(text, Some(lang), &ftl_folder, &langs) {
+                if let Some(bundle) =
+                    get_bundle_with_extra(text, Some(lang), &ftl_folder, &langs, &log)
+                {
                     bundles.push(bundle);
                 } else {
-                    error!("Failed to create bundle for {:?}", lang.language())
+                    error!(log, "Failed to create bundle for {:?}", lang.language())
                 }

         // if English was listed, any further preferences are skipped,
@@ -265,11 +274,12 @@ impl I18n {

         // add English templates
         let template_bundle =
-            get_bundle_with_extra(ftl_template_text(), None, &ftl_folder, &langs).unwrap();
+            get_bundle_with_extra(ftl_template_text(), None, &ftl_folder, &langs, &log).unwrap();
         bundles.push(template_bundle);

         Self {
             inner: Arc::new(Mutex::new(I18nInner { bundles })),
+            log,
         }
     }
@@ -302,7 +312,7 @@ impl I18n {
             let mut errs = vec![];
             let out = bundle.format_pattern(pat, args.as_ref(), &mut errs);
             if !errs.is_empty() {
-                error!("Error(s) in translation '{}': {:?}", key, errs);
+                error!(self.log, "Error(s) in translation '{}': {:?}", key, errs);
             }
             // clone so we can discard args
             return out.to_string().into();
@@ -389,6 +399,7 @@ impl NumberFormatter {
 mod test {
     use crate::i18n::NumberFormatter;
     use crate::i18n::{tr_args, I18n};
+    use crate::log;
     use std::path::PathBuf;
     use unic_langid::langid;

@@ -403,8 +414,10 @@ mod test {
         let mut ftl_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
         ftl_dir.push("tests/support/ftl");

+        let log = log::terminal();
+
         // English template
-        let i18n = I18n::new(&["zz"], &ftl_dir);
+        let i18n = I18n::new(&["zz"], &ftl_dir, log.clone());
         assert_eq!(i18n.tr_("valid-key", None), "a valid key");
         assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
@@ -427,7 +440,7 @@ mod test {
         );

         // Another language
-        let i18n = I18n::new(&["ja_JP"], &ftl_dir);
+        let i18n = I18n::new(&["ja_JP"], &ftl_dir, log.clone());
         assert_eq!(i18n.tr_("valid-key", None), "キー");
         assert_eq!(i18n.tr_("only-in-english", None), "not translated");
         assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
@@ -438,7 +451,7 @@ mod test {
         );

         // Decimal separator
-        let i18n = I18n::new(&["pl-PL"], &ftl_dir);
+        let i18n = I18n::new(&["pl-PL"], &ftl_dir, log.clone());
         // falls back on English, but with Polish separators
         assert_eq!(
             i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>2.07])),
|
@ -14,6 +14,7 @@ pub mod cloze;
|
|||
pub mod err;
|
||||
pub mod i18n;
|
||||
pub mod latex;
|
||||
pub mod log;
|
||||
pub mod media;
|
||||
pub mod sched;
|
||||
pub mod template;
|
||||
|
|
rslib/src/log.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+pub use slog::{debug, error, Logger};
+use slog::{slog_o, Drain};
+use slog_async::OverflowStrategy;
+use std::fs::OpenOptions;
+use std::io;
+use std::path::Path;
+
+pub(crate) fn terminal() -> Logger {
+    let decorator = slog_term::TermDecorator::new().build();
+    let drain = slog_term::FullFormat::new(decorator).build().fuse();
+    let drain = slog_envlogger::new(drain);
+    let drain = slog_async::Async::new(drain)
+        .chan_size(1_024)
+        .overflow_strategy(OverflowStrategy::Block)
+        .build()
+        .fuse();
+    Logger::root(drain, slog_o!())
+}
+
+fn file(path: &Path) -> io::Result<Logger> {
+    let file = OpenOptions::new().create(true).append(true).open(path)?;
+
+    let decorator = slog_term::PlainSyncDecorator::new(file);
+    let drain = slog_term::FullFormat::new(decorator).build().fuse();
+    let drain = slog_envlogger::new(drain);
+    let drain = slog_async::Async::new(drain)
+        .chan_size(1_024)
+        .overflow_strategy(OverflowStrategy::Block)
+        .build()
+        .fuse();
+    Ok(Logger::root(drain, slog_o!()))
+}
+
+/// Get a logger, logging to a file if a path was provided, otherwise terminal.
+pub(crate) fn default_logger(path: Option<&Path>) -> io::Result<Logger> {
+    Ok(match path {
+        Some(path) => file(path)?,
+        None => terminal(),
+    })
+}
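A rough usage sketch of the module above (a hypothetical call site; it has to live inside rslib, since terminal() and default_logger() are pub(crate)). Key/value pairs after the semicolon become structured fields on the record instead of text interpolated into the message:

    use crate::log::{debug, default_logger};
    use std::path::Path;

    fn demo() -> std::io::Result<()> {
        // no path: decorated terminal output, filtered via RUST_LOG
        let log = default_logger(None)?;
        debug!(log, "change check"; "folder_mod" => 123, "db_mod" => 456);

        // with a path: the same records, appended to the file instead
        let log = default_logger(Some(Path::new("collection2.log")))?;
        debug!(log, "opened collection");
        Ok(())
    }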
rslib/src/media/changetracker.rs
@@ -2,6 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::{AnkiError, Result};
+use crate::log::{debug, Logger};
 use crate::media::database::{MediaDatabaseContext, MediaEntry};
 use crate::media::files::{
     filename_if_normalized, mtime_as_i64, sha1_of_file, MEDIA_SYNC_FILESIZE_LIMIT,
@@ -25,17 +26,23 @@ where
     media_folder: &'a Path,
     progress_cb: F,
     checked: usize,
+    log: &'a Logger,
 }

 impl<F> ChangeTracker<'_, F>
 where
     F: FnMut(usize) -> bool,
 {
-    pub(super) fn new(media_folder: &Path, progress: F) -> ChangeTracker<F> {
+    pub(super) fn new<'a>(
+        media_folder: &'a Path,
+        progress: F,
+        log: &'a Logger,
+    ) -> ChangeTracker<'a, F> {
         ChangeTracker {
             media_folder,
             progress_cb: progress,
             checked: 0,
+            log,
         }
     }
@@ -53,7 +60,9 @@ where
         let dirmod = mtime_as_i64(self.media_folder)?;

         let mut meta = ctx.get_meta()?;
+        debug!(self.log, "begin change check"; "folder_mod" => dirmod, "db_mod" => meta.folder_mtime);
         if dirmod == meta.folder_mtime {
+            debug!(self.log, "skip check");
             return Ok(());
         } else {
             meta.folder_mtime = dirmod;
@@ -106,6 +115,7 @@ where
             Some(fname) => fname,
             None => {
                 // not normalized; skip it
+                debug!(self.log, "ignore non-normalized"; "fname"=>disk_fname);
                 continue;
             }
         };
@@ -135,6 +145,7 @@ where
             .as_secs() as i64;
         if let Some(previous_mtime) = previous_mtime {
             if previous_mtime == mtime {
+                debug!(self.log, "mtime unchanged"; "fname"=>fname.as_ref());
                 continue;
             }
         }
@@ -147,6 +158,10 @@ where
             mtime,
             is_new: previous_mtime.is_none(),
         });
+        debug!(self.log, "added or changed";
+            "fname"=>fname.as_ref(),
+            "mtime"=>mtime,
+            "sha1"=>sha1.as_ref().map(|s| hex::encode(&s[0..4])));

         self.checked += 1;
         if self.checked % 10 == 0 {
@@ -156,6 +171,9 @@ where

         // any remaining entries from the database have been deleted
         let removed: Vec<_> = mtimes.into_iter().map(|(k, _)| k).collect();
+        for f in &removed {
+            debug!(self.log, "db entry missing on disk"; "fname"=>f);
+        }

         Ok((added_or_changed, removed))
     }
@@ -264,7 +282,9 @@ mod test {

         let progress_cb = |_n| true;

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        let log = crate::log::terminal();
+
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         let mut entry = ctx.transact(|ctx| {
             assert_eq!(ctx.count()?, 1);
@@ -299,7 +319,7 @@ mod test {
             Ok(entry)
         })?;

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         ctx.transact(|ctx| {
             assert_eq!(ctx.count()?, 1);
@@ -332,7 +352,7 @@ mod test {

         change_mtime(&media_dir);

-        ChangeTracker::new(&mgr.media_folder, progress_cb).register_changes(&mut ctx)?;
+        ChangeTracker::new(&mgr.media_folder, progress_cb, &log).register_changes(&mut ctx)?;

         assert_eq!(ctx.count()?, 0);
         assert!(!ctx.get_pending_uploads(1)?.is_empty());
rslib/src/media/check.rs
@@ -4,6 +4,7 @@
 use crate::err::{AnkiError, Result};
 use crate::i18n::{tr_args, tr_strs, FString, I18n};
 use crate::latex::extract_latex_expanding_clozes;
+use crate::log::{debug, Logger};
 use crate::media::col::{
     for_every_note, get_note_types, mark_collection_modified, open_or_create_collection_db,
     set_note, Note,
@@ -15,7 +16,6 @@ use crate::media::files::{
 use crate::text::{normalize_to_nfc, MediaRef};
 use crate::{media::MediaManager, text::extract_media_refs};
 use coarsetime::Instant;
-use log::debug;
 use std::collections::{HashMap, HashSet};
 use std::path::Path;
 use std::{borrow::Cow, fs, time};
@@ -47,6 +47,7 @@ where
     checked: usize,
     progress_updated: Instant,
     i18n: &'a I18n,
+    log: &'a Logger,
 }

 impl<P> MediaChecker<'_, P>
@@ -58,6 +59,7 @@ where
         col_path: &'a Path,
         progress_cb: P,
         i18n: &'a I18n,
+        log: &'a Logger,
     ) -> MediaChecker<'a, P> {
         MediaChecker {
             mgr,
@@ -66,6 +68,7 @@ where
             checked: 0,
             progress_updated: Instant::now(),
             i18n,
+            log,
         }
     }
@@ -258,7 +261,7 @@ where
             }
         })?;
         let fname = self.mgr.add_file(ctx, disk_fname, &data)?;
-        debug!("renamed {} to {}", disk_fname, fname);
+        debug!(self.log, "renamed"; "from"=>disk_fname, "to"=>&fname.as_ref());
         assert_ne!(fname.as_ref(), disk_fname);

         // move the originally named file to the trash
@@ -303,6 +306,7 @@ where
                 .unwrap_or(0);
             if elap_secs >= 7 * 86_400 {
                 debug!(
+                    self.log,
                     "removing {:?} from trash, as 7 days have elapsed",
                     dentry.path()
                 );
@@ -439,13 +443,15 @@ fn extract_latex_refs(note: &Note, seen_files: &mut HashSet<String>, svg: bool)
 mod test {
     use crate::err::Result;
     use crate::i18n::I18n;
+    use crate::log;
+    use crate::log::Logger;
     use crate::media::check::{MediaCheckOutput, MediaChecker};
     use crate::media::MediaManager;
     use std::fs;
     use std::path::PathBuf;
     use tempfile::{tempdir, TempDir};

-    fn common_setup() -> Result<(TempDir, MediaManager, PathBuf)> {
+    fn common_setup() -> Result<(TempDir, MediaManager, PathBuf, Logger)> {
         let dir = tempdir()?;
         let media_dir = dir.path().join("media");
         fs::create_dir(&media_dir)?;
@@ -458,12 +464,13 @@ mod test {

         let mgr = MediaManager::new(&media_dir, media_db)?;

-        Ok((dir, mgr, col_path))
+        let log = log::terminal();
+        Ok((dir, mgr, col_path, log))
     }

     #[test]
     fn media_check() -> Result<()> {
-        let (_dir, mgr, col_path) = common_setup()?;
+        let (_dir, mgr, col_path, log) = common_setup()?;

         // add some test files
         fs::write(&mgr.media_folder.join("zerobytes"), "")?;
@@ -473,10 +480,10 @@ mod test {
         fs::write(&mgr.media_folder.join("_under.jpg"), "foo")?;
         fs::write(&mgr.media_folder.join("unused.jpg"), "foo")?;

-        let i18n = I18n::new(&["zz"], "dummy");
+        let i18n = I18n::new(&["zz"], "dummy", log.clone());

         let progress = |_n| true;
-        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n);
+        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
         let mut output = checker.check()?;

         assert_eq!(
@@ -522,14 +529,14 @@ Unused: unused.jpg

     #[test]
     fn unicode_normalization() -> Result<()> {
-        let (_dir, mgr, col_path) = common_setup()?;
+        let (_dir, mgr, col_path, log) = common_setup()?;

-        let i18n = I18n::new(&["zz"], "dummy");
+        let i18n = I18n::new(&["zz"], "dummy", log.clone());

         fs::write(&mgr.media_folder.join("ぱぱ.jpg"), "nfd encoding")?;

         let progress = |_n| true;
-        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n);
+        let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
         let mut output = checker.check()?;
         output.missing.sort();
rslib/src/media/database.rs
@@ -2,13 +2,12 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
-use log::debug;
 use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
 use std::collections::HashMap;
 use std::path::Path;

 fn trace(s: &str) {
-    debug!("sql: {}", s)
+    println!("sql: {}", s)
 }

 pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
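With the log crate dropped from rslib, the SQL trace helper prints straight to stdout instead; it is a developer-only hook, so it was apparently not worth threading a Logger down into the database layer for it.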
rslib/src/media/files.rs
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use crate::err::Result;
+use crate::log::{debug, Logger};
 use lazy_static::lazy_static;
-use log::debug;
 use regex::Regex;
 use sha1::Sha1;
 use std::borrow::Cow;
@@ -364,6 +364,7 @@ pub(super) fn add_file_from_ankiweb(
     media_folder: &Path,
     fname: &str,
     data: &[u8],
+    log: &Logger,
 ) -> Result<AddedFile> {
     let sha1 = sha1_of_data(data);
     let normalized = normalize_filename(fname);
@@ -374,7 +375,7 @@ pub(super) fn add_file_from_ankiweb(
         fs::write(&path, data)?;
         (None, path)
     } else {
-        debug!("non-normalized filename received {}", fname);
+        debug!(log, "non-normalized filename received"; "fname"=>&fname);
         // ankiweb sent us a non-normalized filename, so we'll rename it
         let new_name = add_data_to_folder_uniquely(media_folder, fname, data, sha1)?;
         (
rslib/src/media/mod.rs
@@ -6,6 +6,7 @@ use crate::media::database::{open_or_create, MediaDatabaseContext, MediaEntry};
 use crate::media::files::{add_data_to_folder_uniquely, mtime_as_i64, remove_files, sha1_of_data};
 use crate::media::sync::{MediaSyncProgress, MediaSyncer};
 use rusqlite::Connection;
+use slog::Logger;
 use std::borrow::Cow;
 use std::path::{Path, PathBuf};
rslib/src/media/sync.rs
@@ -11,10 +11,10 @@ use crate::media::MediaManager;
 use crate::version;
 use bytes::Bytes;
 use coarsetime::Instant;
-use log::debug;
 use reqwest::{multipart, Client, Response};
 use serde_derive::{Deserialize, Serialize};
 use serde_tuple::Serialize_tuple;
+use slog::{debug, Logger};
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::io::{Read, Write};
@@ -46,6 +46,7 @@ where
     progress: MediaSyncProgress,
     progress_updated: Instant,
     endpoint: &'a str,
+    log: Logger,
 }

 #[derive(Debug, Deserialize)]
@@ -135,7 +136,12 @@ impl<P> MediaSyncer<'_, P>
 where
     P: Fn(&MediaSyncProgress) -> bool,
 {
-    pub fn new<'a>(mgr: &'a MediaManager, progress_cb: P, endpoint: &'a str) -> MediaSyncer<'a, P> {
+    pub fn new<'a>(
+        mgr: &'a MediaManager,
+        progress_cb: P,
+        endpoint: &'a str,
+        log: Logger,
+    ) -> MediaSyncer<'a, P> {
         let client = Client::builder()
             .connect_timeout(time::Duration::from_secs(30))
             .build()
@@ -151,6 +157,7 @@ where
             progress: Default::default(),
             progress_updated: Instant::now(),
             endpoint,
+            log,
         }
     }
@@ -165,16 +172,19 @@ where
         let meta = self.ctx.get_meta()?;
         let client_usn = meta.last_sync_usn;

-        debug!("beginning media sync");
+        debug!(self.log, "begin media sync");
         let (sync_key, server_usn) = self.sync_begin(hkey).await?;
         self.skey = Some(sync_key);
-        debug!("server usn was {}", server_usn);
+        debug!(self.log, "server usn was {}", server_usn);

         let mut actions_performed = false;

         // need to fetch changes from server?
         if client_usn != server_usn {
-            debug!("differs from local usn {}, fetching changes", client_usn);
+            debug!(
+                self.log,
+                "differs from local usn {}, fetching changes", client_usn
+            );
             self.fetch_changes(meta).await?;
             actions_performed = true;
         }
@@ -192,7 +202,7 @@ where

         self.fire_progress_cb()?;

-        debug!("media sync complete");
+        debug!(self.log, "media sync complete");

         Ok(())
     }
@@ -215,7 +225,7 @@ where
             }
         };

-        ChangeTracker::new(self.mgr.media_folder.as_path(), progress)
+        ChangeTracker::new(self.mgr.media_folder.as_path(), progress, &self.log)
             .register_changes(&mut self.ctx)
     }
@@ -242,11 +252,15 @@ where
     async fn fetch_changes(&mut self, mut meta: MediaDatabaseMetadata) -> Result<()> {
         let mut last_usn = meta.last_sync_usn;
         loop {
-            debug!("fetching record batch starting from usn {}", last_usn);
+            debug!(
+                self.log,
+                "fetching record batch";
+                "start_usn"=>last_usn
+            );

             let batch = self.fetch_record_batch(last_usn).await?;
             if batch.is_empty() {
-                debug!("empty batch, done");
+                debug!(self.log, "empty batch, done");
                 break;
             }
             last_usn = batch.last().unwrap().usn;
@@ -255,7 +269,7 @@ where
             self.maybe_fire_progress_cb()?;

             let (to_download, to_delete, to_remove_pending) =
-                determine_required_changes(&mut self.ctx, &batch)?;
+                determine_required_changes(&mut self.ctx, &batch, &self.log)?;

             // file removal
             self.mgr.remove_files(&mut self.ctx, to_delete.as_slice())?;
@@ -272,9 +286,12 @@ where
                     .map(ToOwned::to_owned)
                     .collect();
                 let zip_data = self.fetch_zip(batch.as_slice()).await?;
-                let download_batch =
-                    extract_into_media_folder(self.mgr.media_folder.as_path(), zip_data)?
-                        .into_iter();
+                let download_batch = extract_into_media_folder(
+                    self.mgr.media_folder.as_path(),
+                    zip_data,
+                    &self.log,
+                )?
+                .into_iter();
                 let len = download_batch.len();
                 dl_fnames = &dl_fnames[len..];
                 downloaded.extend(download_batch);
@@ -285,10 +302,11 @@ where

             // then update the DB
             let dirmod = mtime_as_i64(&self.mgr.media_folder)?;
+            let log = &self.log;
             self.ctx.transact(|ctx| {
-                record_clean(ctx, &to_remove_pending)?;
-                record_removals(ctx, &to_delete)?;
-                record_additions(ctx, downloaded)?;
+                record_clean(ctx, &to_remove_pending, log)?;
+                record_removals(ctx, &to_delete, log)?;
+                record_additions(ctx, downloaded, log)?;

                 // update usn
                 meta.last_sync_usn = last_usn;
@@ -308,7 +326,7 @@ where
                 break;
             }

-            let zip_data = zip_files(&mut self.ctx, &self.mgr.media_folder, &pending)?;
+            let zip_data = zip_files(&mut self.ctx, &self.mgr.media_folder, &pending, &self.log)?;
             if zip_data.is_none() {
                 self.progress.checked += pending.len();
                 self.maybe_fire_progress_cb()?;
@@ -334,14 +352,16 @@ where
             .map(|e| &e.fname)
             .collect();
         let fname_cnt = fnames.len() as i32;
+        let log = &self.log;
         self.ctx.transact(|ctx| {
-            record_clean(ctx, fnames.as_slice())?;
+            record_clean(ctx, fnames.as_slice(), log)?;
             let mut meta = ctx.get_meta()?;
             if meta.last_sync_usn + fname_cnt == reply.current_usn {
                 meta.last_sync_usn = reply.current_usn;
                 ctx.set_meta(&meta)?;
             } else {
                 debug!(
+                    log,
                     "server usn {} is not {}, skipping usn update",
                     reply.current_usn,
                     meta.last_sync_usn + fname_cnt
@@ -411,7 +431,7 @@ where
     async fn fetch_zip(&self, files: &[&String]) -> Result<Bytes> {
         let url = format!("{}downloadFiles", self.endpoint);

-        debug!("requesting files: {:?}", files);
+        debug!(self.log, "requesting files: {:?}", files);

         let req = ZipRequest { files };
         let resp = ankiweb_json_request(&self.client, &url, &req, self.skey()).await?;
@@ -470,6 +490,7 @@ fn determine_required_change(
 fn determine_required_changes<'a>(
     ctx: &mut MediaDatabaseContext,
     records: &'a [ServerMediaRecord],
+    log: &Logger,
 ) -> Result<(Vec<&'a String>, Vec<&'a String>, Vec<&'a String>)> {
     let mut to_download = vec![];
     let mut to_delete = vec![];
@@ -493,12 +514,13 @@ fn determine_required_changes<'a>(

         let req_change = determine_required_change(&local_sha1, &remote.sha1, local_state);
         debug!(
-            "for {}, lsha={} rsha={} lstate={:?} -> {:?}",
-            remote.fname,
-            local_sha1.chars().take(8).collect::<String>(),
-            remote.sha1.chars().take(8).collect::<String>(),
-            local_state,
-            req_change
+            log,
+            "determine action";
+            "fname" => &remote.fname,
+            "lsha" => local_sha1.chars().take(8).collect::<String>(),
+            "rsha" => remote.sha1.chars().take(8).collect::<String>(),
+            "state" => ?local_state,
+            "action" => ?req_change
         );
         match req_change {
             RequiredChange::Download => to_download.push(&remote.fname),
@@ -556,7 +578,11 @@ async fn ankiweb_request(
         .map_err(Into::into)
 }

-fn extract_into_media_folder(media_folder: &Path, zip: Bytes) -> Result<Vec<AddedFile>> {
+fn extract_into_media_folder(
+    media_folder: &Path,
+    zip: Bytes,
+    log: &Logger,
+) -> Result<Vec<AddedFile>> {
     let reader = io::Cursor::new(zip);
     let mut zip = zip::ZipArchive::new(reader)?;
@@ -578,9 +604,9 @@ fn extract_into_media_folder(
         let mut data = Vec::with_capacity(file.size() as usize);
         file.read_to_end(&mut data)?;

-        debug!("writing {}", real_name);
+        debug!(log, "write"; "fname" => real_name);

-        let added = add_file_from_ankiweb(media_folder, real_name, &data)?;
+        let added = add_file_from_ankiweb(media_folder, real_name, &data, log)?;

         output.push(added);
     }
@@ -588,21 +614,29 @@ fn extract_into_media_folder(
     Ok(output)
 }

-fn record_removals(ctx: &mut MediaDatabaseContext, removals: &[&String]) -> Result<()> {
+fn record_removals(
+    ctx: &mut MediaDatabaseContext,
+    removals: &[&String],
+    log: &Logger,
+) -> Result<()> {
     for &fname in removals {
-        debug!("marking removed: {}", fname);
+        debug!(log, "mark removed"; "fname" => fname);
         ctx.remove_entry(fname)?;
     }

     Ok(())
 }

-fn record_additions(ctx: &mut MediaDatabaseContext, additions: Vec<AddedFile>) -> Result<()> {
+fn record_additions(
+    ctx: &mut MediaDatabaseContext,
+    additions: Vec<AddedFile>,
+    log: &Logger,
+) -> Result<()> {
     for file in additions {
         if let Some(renamed) = file.renamed_from {
             // the file AnkiWeb sent us wasn't normalized, so we need to record
             // the old file name as a deletion
-            debug!("marking non-normalized file as deleted: {}", renamed);
+            debug!(log, "marking non-normalized file as deleted: {}", renamed);
             let mut entry = MediaEntry {
                 fname: renamed,
                 sha1: None,
@@ -611,7 +645,10 @@ fn record_additions(
             };
             ctx.set_entry(&entry)?;
             // and upload the new filename to ankiweb
-            debug!("marking renamed file as needing upload: {}", file.fname);
+            debug!(
+                log,
+                "marking renamed file as needing upload: {}", file.fname
+            );
             entry = MediaEntry {
                 fname: file.fname.to_string(),
                 sha1: Some(file.sha1),
@@ -627,10 +664,9 @@ fn record_additions(
                 mtime: file.mtime,
                 sync_required: false,
             };
-            debug!(
-                "marking added: {} {}",
-                entry.fname,
-                hex::encode(entry.sha1.as_ref().unwrap())
+            debug!(log, "mark added";
+                "fname" => &entry.fname,
+                "sha1" => hex::encode(&entry.sha1.as_ref().unwrap()[0..4])
             );
             ctx.set_entry(&entry)?;
         }
@@ -639,12 +675,12 @@ fn record_additions(
     Ok(())
 }

-fn record_clean(ctx: &mut MediaDatabaseContext, clean: &[&String]) -> Result<()> {
+fn record_clean(ctx: &mut MediaDatabaseContext, clean: &[&String], log: &Logger) -> Result<()> {
     for &fname in clean {
         if let Some(mut entry) = ctx.get_entry(fname)? {
             if entry.sync_required {
                 entry.sync_required = false;
-                debug!("marking clean: {}", entry.fname);
+                debug!(log, "mark clean"; "fname"=>&entry.fname);
                 ctx.set_entry(&entry)?;
             }
         }
@@ -657,6 +693,7 @@ fn zip_files<'a>(
     ctx: &mut MediaDatabaseContext,
     media_folder: &Path,
     files: &'a [MediaEntry],
+    log: &Logger,
 ) -> Result<Option<Vec<u8>>> {
     let buf = vec![];
     let mut invalid_entries = vec![];
@@ -678,7 +715,7 @@ fn zip_files<'a>(
         let file_data = match data_for_file(media_folder, &file.fname) {
             Ok(data) => data,
             Err(e) => {
-                debug!("error accessing {}: {}", &file.fname, e);
+                debug!(log, "error accessing {}: {}", &file.fname, e);
                 invalid_entries.push(&file.fname);
                 continue;
             }
@@ -687,7 +724,7 @@ fn zip_files<'a>(
         if let Some(data) = &file_data {
             let normalized = normalize_filename(&file.fname);
             if let Cow::Owned(o) = normalized {
-                debug!("media check required: {} should be {}", &file.fname, o);
+                debug!(log, "media check required: {} should be {}", &file.fname, o);
                 invalid_entries.push(&file.fname);
                 continue;
             }
@@ -706,9 +743,10 @@ fn zip_files<'a>(
         }

         debug!(
-            "will upload {} as {}",
-            file.fname,
-            if file_data.is_some() {
+            log,
+            "will upload";
+            "fname"=>&file.fname,
+            "kind"=>if file_data.is_some() {
                 "addition "
             } else {
                 "removal"
@@ -772,8 +810,10 @@ mod test {
             true
         };

+        let log = crate::log::terminal();
+
         let mgr = MediaManager::new(&media_dir, &media_db)?;
-        mgr.sync_media(progress, "https://sync.ankiweb.net/msync/", hkey)
+        mgr.sync_media(progress, "https://sync.ankiweb.net/msync/", hkey, log)
             .await?;

         Ok(())
@@ -781,8 +821,6 @@ mod test {

     #[test]
     fn sync() {
-        env_logger::init();
-
         let hkey = match std::env::var("TEST_HKEY") {
             Ok(s) => s,
             Err(_) => {
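The conversions above use slog's two calling styles; here is a minimal sketch with hypothetical values (the real code gets its Logger from MediaSyncer). Format arguments are interpolated into the message, while pairs after a semicolon are emitted as structured fields, with a leading ? requesting Debug formatting:

    use slog::{debug, Logger};

    #[derive(Debug)]
    enum RequiredChange {
        Download,
    }

    fn demo(log: &Logger) {
        // format style: the value becomes part of the message text
        debug!(log, "server usn was {}", 42);
        // structured style: fields stay separate in the output,
        // which keeps file logs grep-friendly
        debug!(log, "determine action";
            "fname" => "a.jpg",
            "action" => ?RequiredChange::Download);
    }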
rslib/src/sched/timespan.rs
@@ -176,13 +176,15 @@ impl Timespan {
 #[cfg(test)]
 mod test {
     use crate::i18n::I18n;
+    use crate::log;
     use crate::sched::timespan::{
         answer_button_time, learning_congrats, studied_today, time_span, MONTH,
     };

     #[test]
     fn answer_buttons() {
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(answer_button_time(30.0, &i18n), "30s");
         assert_eq!(answer_button_time(70.0, &i18n), "1.2m");
         assert_eq!(answer_button_time(1.1 * MONTH, &i18n), "1.1mo");
@@ -190,7 +192,8 @@ mod test {

     #[test]
     fn time_spans() {
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(time_span(1.0, &i18n, false), "1 second");
         assert_eq!(time_span(30.3, &i18n, false), "30 seconds");
         assert_eq!(time_span(30.3, &i18n, true), "30.3 seconds");
@@ -202,7 +205,8 @@ mod test {
     #[test]
     fn combo() {
         // temporary test of fluent term handling
-        let i18n = I18n::new(&["zz"], "");
+        let log = log::terminal();
+        let i18n = I18n::new(&["zz"], "", log);
         assert_eq!(
             &studied_today(3, 13.0, &i18n).replace("\n", " "),
             "Studied 3 cards in 13 seconds today (4.33s/card)"
rspy/Cargo.toml
@@ -6,8 +6,6 @@ authors = ["Ankitects Pty Ltd and contributors"]

 [dependencies]
 anki = { path = "../rslib" }
-log = "0.4.8"
-env_logger = "0.7.1"
 tokio = "0.2.11"

 [dependencies.pyo3]
rspy/src/lib.rs
@@ -4,7 +4,6 @@
 use anki::backend::{
     init_backend, init_i18n_backend, Backend as RustBackend, I18nBackend as RustI18nBackend,
 };
-use log::error;
 use pyo3::prelude::*;
 use pyo3::types::PyBytes;
 use pyo3::{exceptions, wrap_pyfunction};
@@ -55,7 +54,7 @@ impl Backend {
         let res: PyObject = match callback.call1(py, (out_obj,)) {
             Ok(res) => res,
             Err(e) => {
-                error!("error calling callback:");
+                println!("error calling callback:");
                 e.print(py);
                 return false;
             }
@@ -63,7 +62,7 @@ impl Backend {
         match res.extract(py) {
             Ok(cont) => cont,
             Err(e) => {
-                error!("callback did not return bool: {:?}", e);
+                println!("callback did not return bool: {:?}", e);
                 return false;
             }
         }
@@ -107,7 +106,5 @@ fn ankirspy(_py: Python, m: &PyModule) -> PyResult<()> {
     m.add_wrapped(wrap_pyfunction!(open_backend)).unwrap();
     m.add_wrapped(wrap_pyfunction!(open_i18n)).unwrap();

-    env_logger::init();
-
     Ok(())
 }