Mirror of https://github.com/ankitects/anki.git, synced 2025-09-19 06:22:22 -04:00
move progress and search backend code into separate files
commit 30865eae51 (parent 96940f0527)
3 changed files with 364 additions and 333 deletions
@@ -7,31 +7,29 @@ mod config;
 mod dbproxy;
 mod generic;
 mod http_sync_server;
+mod progress;
 mod scheduler;
+mod search;
 
 pub use crate::backend_proto::BackendMethod;
 use crate::{
     backend::dbproxy::db_command_bytes,
     backend_proto as pb,
     backend_proto::{
-        sort_order::builtin::Kind as SortKindProto, sort_order::Value as SortOrderProto,
         AddOrUpdateDeckConfigLegacyIn, BackendResult, Empty, RenderedTemplateReplacement,
     },
     card::{Card, CardID},
     cloze::add_cloze_numbers_in_string,
     collection::{open_collection, Collection},
-    config::SortKind,
-    dbcheck::DatabaseCheckProgress,
     deckconf::{DeckConf, DeckConfSchema11},
     decks::{Deck, DeckID, DeckSchema11},
     err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind},
-    i18n::{tr_args, I18n, TR},
+    i18n::I18n,
     latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex},
     log,
     log::default_logger,
     markdown::render_markdown,
     media::check::MediaChecker,
-    media::sync::MediaSyncProgress,
     media::MediaManager,
     notes::{Note, NoteID},
     notetype::{
@@ -43,31 +41,26 @@ use crate::{
         states::{CardState, NextCardStates},
         timespan::{answer_button_time, time_span},
     },
-    search::{
-        concatenate_searches, parse_search, replace_search_node, write_nodes, BoolSeparator, Node,
-        PropertyKind, RatingKind, SearchNode, SortMode, StateKind, TemplateKind,
-    },
+    search::{concatenate_searches, replace_search_node, write_nodes, Node},
     stats::studied_today,
     sync::{
         get_remote_sync_meta, http::SyncRequest, sync_abort, sync_login, FullSyncProgress,
         LocalServer, NormalSyncProgress, SyncActionRequired, SyncAuth, SyncMeta, SyncOutput,
-        SyncStage,
     },
     template::RenderedNode,
-    text::{escape_anki_wildcards, extract_av_tags, sanitize_html_no_images, strip_av_tags, AVTag},
+    text::{extract_av_tags, sanitize_html_no_images, strip_av_tags, AVTag},
     timestamp::TimestampSecs,
     undo::UndoableOpKind,
 };
 use fluent::FluentValue;
 use futures::future::{AbortHandle, AbortRegistration, Abortable};
-use itertools::Itertools;
 use log::error;
 use once_cell::sync::OnceCell;
 use pb::{sync_status_out, BackendService};
+use progress::{AbortHandleSlot, Progress};
 use prost::Message;
 use serde_json::Value as JsonValue;
 use slog::warn;
-use std::convert::TryFrom;
 use std::{collections::HashSet, convert::TryInto};
 use std::{
     result,
@@ -75,34 +68,7 @@ use std::{
 };
 use tokio::runtime::{self, Runtime};
 
-struct ThrottlingProgressHandler {
-    state: Arc<Mutex<ProgressState>>,
-    last_update: coarsetime::Instant,
-}
-
-impl ThrottlingProgressHandler {
-    /// Returns true if should continue.
-    fn update(&mut self, progress: impl Into<Progress>, throttle: bool) -> bool {
-        let now = coarsetime::Instant::now();
-        if throttle && now.duration_since(self.last_update).as_f64() < 0.1 {
-            return true;
-        }
-        self.last_update = now;
-        let mut guard = self.state.lock().unwrap();
-        guard.last_progress.replace(progress.into());
-        let want_abort = guard.want_abort;
-        guard.want_abort = false;
-        !want_abort
-    }
-}
-
-struct ProgressState {
-    want_abort: bool,
-    last_progress: Option<Progress>,
-}
-
-// fixme: this should support multiple abort handles.
-type AbortHandleSlot = Arc<Mutex<Option<AbortHandle>>>;
+use self::progress::{progress_to_proto, ProgressState};
 
 pub struct Backend {
     col: Arc<Mutex<Option<Collection>>>,
@@ -136,15 +102,6 @@ impl RemoteSyncStatus {
     }
 }
 
-#[derive(Clone, Copy)]
-enum Progress {
-    MediaSync(MediaSyncProgress),
-    MediaCheck(u32),
-    FullSync(FullSyncProgress),
-    NormalSync(NormalSyncProgress),
-    DatabaseCheck(DatabaseCheckProgress),
-}
-
 /// Convert an Anki error to a protobuf error.
 fn anki_error_to_proto_error(err: AnkiError, i18n: &I18n) -> pb::BackendError {
     use pb::backend_error::Value as V;
@@ -222,135 +179,6 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
     Ok(Backend::new(i18n, input.server))
 }
 
-impl TryFrom<pb::SearchNode> for Node {
-    type Error = AnkiError;
-
-    fn try_from(msg: pb::SearchNode) -> std::result::Result<Self, Self::Error> {
-        use pb::search_node::group::Joiner;
-        use pb::search_node::Filter;
-        use pb::search_node::Flag;
-        Ok(if let Some(filter) = msg.filter {
-            match filter {
-                Filter::Tag(s) => Node::Search(SearchNode::Tag(escape_anki_wildcards(&s))),
-                Filter::Deck(s) => Node::Search(SearchNode::Deck(if s == "*" {
-                    s
-                } else {
-                    escape_anki_wildcards(&s)
-                })),
-                Filter::Note(s) => Node::Search(SearchNode::NoteType(escape_anki_wildcards(&s))),
-                Filter::Template(u) => {
-                    Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
-                }
-                Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string())),
-                Filter::Nids(nids) => Node::Search(SearchNode::NoteIDs(nids.into_id_string())),
-                Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
-                    note_type_id: dupe.notetype_id.into(),
-                    text: dupe.first_field,
-                }),
-                Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
-                    field: escape_anki_wildcards(&s),
-                    text: "*".to_string(),
-                    is_re: false,
-                }),
-                Filter::Rated(rated) => Node::Search(SearchNode::Rated {
-                    days: rated.days,
-                    ease: rated.rating().into(),
-                }),
-                Filter::AddedInDays(u) => Node::Search(SearchNode::AddedInDays(u)),
-                Filter::DueInDays(i) => Node::Search(SearchNode::Property {
-                    operator: "<=".to_string(),
-                    kind: PropertyKind::Due(i),
-                }),
-                Filter::DueOnDay(i) => Node::Search(SearchNode::Property {
-                    operator: "=".to_string(),
-                    kind: PropertyKind::Due(i),
-                }),
-                Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
-                Filter::CardState(state) => Node::Search(SearchNode::State(
-                    pb::search_node::CardState::from_i32(state)
-                        .unwrap_or_default()
-                        .into(),
-                )),
-                Filter::Flag(flag) => match Flag::from_i32(flag).unwrap_or(Flag::Any) {
-                    Flag::None => Node::Search(SearchNode::Flag(0)),
-                    Flag::Any => Node::Not(Box::new(Node::Search(SearchNode::Flag(0)))),
-                    Flag::Red => Node::Search(SearchNode::Flag(1)),
-                    Flag::Orange => Node::Search(SearchNode::Flag(2)),
-                    Flag::Green => Node::Search(SearchNode::Flag(3)),
-                    Flag::Blue => Node::Search(SearchNode::Flag(4)),
-                },
-                Filter::Negated(term) => Node::try_from(*term)?.negated(),
-                Filter::Group(mut group) => {
-                    match group.nodes.len() {
-                        0 => return Err(AnkiError::invalid_input("empty group")),
-                        // a group of 1 doesn't need to be a group
-                        1 => group.nodes.pop().unwrap().try_into()?,
-                        // 2+ nodes
-                        _ => {
-                            let joiner = match group.joiner() {
-                                Joiner::And => Node::And,
-                                Joiner::Or => Node::Or,
-                            };
-                            let parsed: Vec<_> = group
-                                .nodes
-                                .into_iter()
-                                .map(TryFrom::try_from)
-                                .collect::<Result<_>>()?;
-                            let joined = parsed.into_iter().intersperse(joiner).collect();
-                            Node::Group(joined)
-                        }
-                    }
-                }
-                Filter::ParsableText(text) => {
-                    let mut nodes = parse_search(&text)?;
-                    if nodes.len() == 1 {
-                        nodes.pop().unwrap()
-                    } else {
-                        Node::Group(nodes)
-                    }
-                }
-            }
-        } else {
-            Node::Search(SearchNode::WholeCollection)
-        })
-    }
-}
-
-impl From<pb::search_node::group::Joiner> for BoolSeparator {
-    fn from(sep: pb::search_node::group::Joiner) -> Self {
-        match sep {
-            pb::search_node::group::Joiner::And => BoolSeparator::And,
-            pb::search_node::group::Joiner::Or => BoolSeparator::Or,
-        }
-    }
-}
-
-impl From<pb::search_node::Rating> for RatingKind {
-    fn from(r: pb::search_node::Rating) -> Self {
-        match r {
-            pb::search_node::Rating::Again => RatingKind::AnswerButton(1),
-            pb::search_node::Rating::Hard => RatingKind::AnswerButton(2),
-            pb::search_node::Rating::Good => RatingKind::AnswerButton(3),
-            pb::search_node::Rating::Easy => RatingKind::AnswerButton(4),
-            pb::search_node::Rating::Any => RatingKind::AnyAnswerButton,
-            pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
-        }
-    }
-}
-
-impl From<pb::search_node::CardState> for StateKind {
-    fn from(k: pb::search_node::CardState) -> Self {
-        match k {
-            pb::search_node::CardState::New => StateKind::New,
-            pb::search_node::CardState::Learn => StateKind::Learning,
-            pb::search_node::CardState::Review => StateKind::Review,
-            pb::search_node::CardState::Due => StateKind::Due,
-            pb::search_node::CardState::Suspended => StateKind::Suspended,
-            pb::search_node::CardState::Buried => StateKind::Buried,
-        }
-    }
-}
-
 impl BackendService for Backend {
     fn latest_progress(&self, _input: Empty) -> BackendResult<pb::Progress> {
         let progress = self.progress_state.lock().unwrap().last_progress;
@@ -1614,18 +1442,6 @@ impl Backend {
         )
     }
 
-    fn new_progress_handler(&self) -> ThrottlingProgressHandler {
-        {
-            let mut guard = self.progress_state.lock().unwrap();
-            guard.want_abort = false;
-            guard.last_progress = None;
-        }
-        ThrottlingProgressHandler {
-            state: Arc::clone(&self.progress_state),
-            last_update: coarsetime::Instant::now(),
-        }
-    }
-
     fn runtime_handle(&self) -> runtime::Handle {
         self.runtime
             .get_or_init(|| {
@@ -1939,120 +1755,6 @@ impl From<RenderCardOutput> for pb::RenderCardOut {
     }
 }
 
-fn progress_to_proto(progress: Option<Progress>, i18n: &I18n) -> pb::Progress {
-    let progress = if let Some(progress) = progress {
-        match progress {
-            Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, i18n)),
-            Progress::MediaCheck(n) => {
-                let s = i18n.trn(TR::MediaCheckChecked, tr_args!["count"=>n]);
-                pb::progress::Value::MediaCheck(s)
-            }
-            Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync {
-                transferred: p.transferred_bytes as u32,
-                total: p.total_bytes as u32,
-            }),
-            Progress::NormalSync(p) => {
-                let stage = match p.stage {
-                    SyncStage::Connecting => i18n.tr(TR::SyncSyncing),
-                    SyncStage::Syncing => i18n.tr(TR::SyncSyncing),
-                    SyncStage::Finalizing => i18n.tr(TR::SyncChecking),
-                }
-                .to_string();
-                let added = i18n.trn(
-                    TR::SyncAddedUpdatedCount,
-                    tr_args![
-                            "up"=>p.local_update, "down"=>p.remote_update],
-                );
-                let removed = i18n.trn(
-                    TR::SyncMediaRemovedCount,
-                    tr_args![
-                            "up"=>p.local_remove, "down"=>p.remote_remove],
-                );
-                pb::progress::Value::NormalSync(pb::progress::NormalSync {
-                    stage,
-                    added,
-                    removed,
-                })
-            }
-            Progress::DatabaseCheck(p) => {
-                let mut stage_total = 0;
-                let mut stage_current = 0;
-                let stage = match p {
-                    DatabaseCheckProgress::Integrity => i18n.tr(TR::DatabaseCheckCheckingIntegrity),
-                    DatabaseCheckProgress::Optimize => i18n.tr(TR::DatabaseCheckRebuilding),
-                    DatabaseCheckProgress::Cards => i18n.tr(TR::DatabaseCheckCheckingCards),
-                    DatabaseCheckProgress::Notes { current, total } => {
-                        stage_total = total;
-                        stage_current = current;
-                        i18n.tr(TR::DatabaseCheckCheckingNotes)
-                    }
-                    DatabaseCheckProgress::History => i18n.tr(TR::DatabaseCheckCheckingHistory),
-                }
-                .to_string();
-                pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck {
-                    stage,
-                    stage_current,
-                    stage_total,
-                })
-            }
-        }
-    } else {
-        pb::progress::Value::None(pb::Empty {})
-    };
-    pb::Progress {
-        value: Some(progress),
-    }
-}
-
-fn media_sync_progress(p: MediaSyncProgress, i18n: &I18n) -> pb::progress::MediaSync {
-    pb::progress::MediaSync {
-        checked: i18n.trn(TR::SyncMediaCheckedCount, tr_args!["count"=>p.checked]),
-        added: i18n.trn(
-            TR::SyncMediaAddedCount,
-            tr_args!["up"=>p.uploaded_files,"down"=>p.downloaded_files],
-        ),
-        removed: i18n.trn(
-            TR::SyncMediaRemovedCount,
-            tr_args!["up"=>p.uploaded_deletions,"down"=>p.downloaded_deletions],
-        ),
-    }
-}
-
-impl From<SortKindProto> for SortKind {
-    fn from(kind: SortKindProto) -> Self {
-        match kind {
-            SortKindProto::NoteCreation => SortKind::NoteCreation,
-            SortKindProto::NoteMod => SortKind::NoteMod,
-            SortKindProto::NoteField => SortKind::NoteField,
-            SortKindProto::NoteTags => SortKind::NoteTags,
-            SortKindProto::NoteType => SortKind::NoteType,
-            SortKindProto::CardMod => SortKind::CardMod,
-            SortKindProto::CardReps => SortKind::CardReps,
-            SortKindProto::CardDue => SortKind::CardDue,
-            SortKindProto::CardEase => SortKind::CardEase,
-            SortKindProto::CardLapses => SortKind::CardLapses,
-            SortKindProto::CardInterval => SortKind::CardInterval,
-            SortKindProto::CardDeck => SortKind::CardDeck,
-            SortKindProto::CardTemplate => SortKind::CardTemplate,
-        }
-    }
-}
-
-impl From<Option<SortOrderProto>> for SortMode {
-    fn from(order: Option<SortOrderProto>) -> Self {
-        use pb::sort_order::Value as V;
-        match order.unwrap_or(V::FromConfig(pb::Empty {})) {
-            V::None(_) => SortMode::NoOrder,
-            V::Custom(s) => SortMode::Custom(s),
-            V::FromConfig(_) => SortMode::FromConfig,
-            V::Builtin(b) => SortMode::Builtin {
-                kind: b.kind().into(),
-                reverse: b.reverse,
-            },
-        }
-    }
-}
-
 impl From<Card> for pb::Card {
     fn from(c: Card) -> Self {
         pb::Card {
@@ -2124,31 +1826,3 @@ impl From<pb::SyncAuth> for SyncAuth {
         }
     }
 }
-
-impl From<FullSyncProgress> for Progress {
-    fn from(p: FullSyncProgress) -> Self {
-        Progress::FullSync(p)
-    }
-}
-
-impl From<MediaSyncProgress> for Progress {
-    fn from(p: MediaSyncProgress) -> Self {
-        Progress::MediaSync(p)
-    }
-}
-
-impl From<NormalSyncProgress> for Progress {
-    fn from(p: NormalSyncProgress) -> Self {
-        Progress::NormalSync(p)
-    }
-}
-
-impl pb::search_node::IdList {
-    fn into_id_string(self) -> String {
-        self.ids
-            .iter()
-            .map(|i| i.to_string())
-            .collect::<Vec<_>>()
-            .join(",")
-    }
-}
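Net effect in the module above, condensed from the added lines in the hunks (a sketch, not a complete file): the moved code is reached through two new submodules, and the progress types the rest of the backend still needs come back in via re-imports.

    mod progress;
    mod search;

    // progress handling now lives in rslib/src/backend/progress.rs
    use progress::{AbortHandleSlot, Progress};
    use self::progress::{progress_to_proto, ProgressState};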
rslib/src/backend/progress.rs (new file, 164 lines)
@@ -0,0 +1,164 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use futures::future::AbortHandle;
+use std::sync::{Arc, Mutex};
+
+use crate::{
+    backend_proto as pb,
+    dbcheck::DatabaseCheckProgress,
+    i18n::{tr_args, I18n, TR},
+    media::sync::MediaSyncProgress,
+    sync::{FullSyncProgress, NormalSyncProgress, SyncStage},
+};
+
+use super::Backend;
+
+pub(super) struct ThrottlingProgressHandler {
+    pub state: Arc<Mutex<ProgressState>>,
+    pub last_update: coarsetime::Instant,
+}
+
+impl ThrottlingProgressHandler {
+    /// Returns true if should continue.
+    pub(super) fn update(&mut self, progress: impl Into<Progress>, throttle: bool) -> bool {
+        let now = coarsetime::Instant::now();
+        if throttle && now.duration_since(self.last_update).as_f64() < 0.1 {
+            return true;
+        }
+        self.last_update = now;
+        let mut guard = self.state.lock().unwrap();
+        guard.last_progress.replace(progress.into());
+        let want_abort = guard.want_abort;
+        guard.want_abort = false;
+        !want_abort
+    }
+}
+
+pub(super) struct ProgressState {
+    pub want_abort: bool,
+    pub last_progress: Option<Progress>,
+}
+
+// fixme: this should support multiple abort handles.
+pub(super) type AbortHandleSlot = Arc<Mutex<Option<AbortHandle>>>;
+
+#[derive(Clone, Copy)]
+pub(super) enum Progress {
+    MediaSync(MediaSyncProgress),
+    MediaCheck(u32),
+    FullSync(FullSyncProgress),
+    NormalSync(NormalSyncProgress),
+    DatabaseCheck(DatabaseCheckProgress),
+}
+
+pub(super) fn progress_to_proto(progress: Option<Progress>, i18n: &I18n) -> pb::Progress {
+    let progress = if let Some(progress) = progress {
+        match progress {
+            Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, i18n)),
+            Progress::MediaCheck(n) => {
+                let s = i18n.trn(TR::MediaCheckChecked, tr_args!["count"=>n]);
+                pb::progress::Value::MediaCheck(s)
+            }
+            Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync {
+                transferred: p.transferred_bytes as u32,
+                total: p.total_bytes as u32,
+            }),
+            Progress::NormalSync(p) => {
+                let stage = match p.stage {
+                    SyncStage::Connecting => i18n.tr(TR::SyncSyncing),
+                    SyncStage::Syncing => i18n.tr(TR::SyncSyncing),
+                    SyncStage::Finalizing => i18n.tr(TR::SyncChecking),
+                }
+                .to_string();
+                let added = i18n.trn(
+                    TR::SyncAddedUpdatedCount,
+                    tr_args![
+                            "up"=>p.local_update, "down"=>p.remote_update],
+                );
+                let removed = i18n.trn(
+                    TR::SyncMediaRemovedCount,
+                    tr_args![
+                            "up"=>p.local_remove, "down"=>p.remote_remove],
+                );
+                pb::progress::Value::NormalSync(pb::progress::NormalSync {
+                    stage,
+                    added,
+                    removed,
+                })
+            }
+            Progress::DatabaseCheck(p) => {
+                let mut stage_total = 0;
+                let mut stage_current = 0;
+                let stage = match p {
+                    DatabaseCheckProgress::Integrity => i18n.tr(TR::DatabaseCheckCheckingIntegrity),
+                    DatabaseCheckProgress::Optimize => i18n.tr(TR::DatabaseCheckRebuilding),
+                    DatabaseCheckProgress::Cards => i18n.tr(TR::DatabaseCheckCheckingCards),
+                    DatabaseCheckProgress::Notes { current, total } => {
+                        stage_total = total;
+                        stage_current = current;
+                        i18n.tr(TR::DatabaseCheckCheckingNotes)
+                    }
+                    DatabaseCheckProgress::History => i18n.tr(TR::DatabaseCheckCheckingHistory),
+                }
+                .to_string();
+                pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck {
+                    stage,
+                    stage_current,
+                    stage_total,
+                })
+            }
+        }
+    } else {
+        pb::progress::Value::None(pb::Empty {})
+    };
+    pb::Progress {
+        value: Some(progress),
+    }
+}
+
+fn media_sync_progress(p: MediaSyncProgress, i18n: &I18n) -> pb::progress::MediaSync {
+    pb::progress::MediaSync {
+        checked: i18n.trn(TR::SyncMediaCheckedCount, tr_args!["count"=>p.checked]),
+        added: i18n.trn(
+            TR::SyncMediaAddedCount,
+            tr_args!["up"=>p.uploaded_files,"down"=>p.downloaded_files],
+        ),
+        removed: i18n.trn(
+            TR::SyncMediaRemovedCount,
+            tr_args!["up"=>p.uploaded_deletions,"down"=>p.downloaded_deletions],
+        ),
+    }
+}
+
+impl From<FullSyncProgress> for Progress {
+    fn from(p: FullSyncProgress) -> Self {
+        Progress::FullSync(p)
+    }
+}
+
+impl From<MediaSyncProgress> for Progress {
+    fn from(p: MediaSyncProgress) -> Self {
+        Progress::MediaSync(p)
+    }
+}
+
+impl From<NormalSyncProgress> for Progress {
+    fn from(p: NormalSyncProgress) -> Self {
+        Progress::NormalSync(p)
+    }
+}
+
+impl Backend {
+    pub(super) fn new_progress_handler(&self) -> ThrottlingProgressHandler {
+        {
+            let mut guard = self.progress_state.lock().unwrap();
+            guard.want_abort = false;
+            guard.last_progress = None;
+        }
+        ThrottlingProgressHandler {
+            state: Arc::clone(&self.progress_state),
+            last_update: coarsetime::Instant::now(),
+        }
+    }
+}
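A rough usage sketch of the handler above, not part of this commit: a long-running backend method takes a fresh handler, reports typed progress as it works, and stops when the client has asked to abort. The method name and the loop are hypothetical; only new_progress_handler(), Progress::MediaCheck, and update() come from the code above.

    impl Backend {
        // Hypothetical example; assumes it lives alongside the backend module
        // so the pub(super) items above are visible.
        fn media_check_sketch(&self) {
            let mut handler = self.new_progress_handler();
            for checked in 0..100u32 {
                // ... one unit of real work would happen here ...

                // update() records the latest progress for latest_progress(),
                // throttled to roughly one write per 100ms, and returns false
                // once the client has requested an abort.
                if !handler.update(Progress::MediaCheck(checked), true) {
                    return;
                }
            }
        }
    }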
rslib/src/backend/search.rs (new file, 193 lines)
@@ -0,0 +1,193 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use itertools::Itertools;
+use std::convert::{TryFrom, TryInto};
+
+use crate::{
+    backend_proto as pb,
+    backend_proto::{
+        sort_order::builtin::Kind as SortKindProto, sort_order::Value as SortOrderProto,
+    },
+    config::SortKind,
+    prelude::*,
+    search::{
+        parse_search, BoolSeparator, Node, PropertyKind, RatingKind, SearchNode, SortMode,
+        StateKind, TemplateKind,
+    },
+    text::escape_anki_wildcards,
+};
+
+impl TryFrom<pb::SearchNode> for Node {
+    type Error = AnkiError;
+
+    fn try_from(msg: pb::SearchNode) -> std::result::Result<Self, Self::Error> {
+        use pb::search_node::group::Joiner;
+        use pb::search_node::Filter;
+        use pb::search_node::Flag;
+        Ok(if let Some(filter) = msg.filter {
+            match filter {
+                Filter::Tag(s) => Node::Search(SearchNode::Tag(escape_anki_wildcards(&s))),
+                Filter::Deck(s) => Node::Search(SearchNode::Deck(if s == "*" {
+                    s
+                } else {
+                    escape_anki_wildcards(&s)
+                })),
+                Filter::Note(s) => Node::Search(SearchNode::NoteType(escape_anki_wildcards(&s))),
+                Filter::Template(u) => {
+                    Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
+                }
+                Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string())),
+                Filter::Nids(nids) => Node::Search(SearchNode::NoteIDs(nids.into_id_string())),
+                Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
+                    note_type_id: dupe.notetype_id.into(),
+                    text: dupe.first_field,
+                }),
+                Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
+                    field: escape_anki_wildcards(&s),
+                    text: "*".to_string(),
+                    is_re: false,
+                }),
+                Filter::Rated(rated) => Node::Search(SearchNode::Rated {
+                    days: rated.days,
+                    ease: rated.rating().into(),
+                }),
+                Filter::AddedInDays(u) => Node::Search(SearchNode::AddedInDays(u)),
+                Filter::DueInDays(i) => Node::Search(SearchNode::Property {
+                    operator: "<=".to_string(),
+                    kind: PropertyKind::Due(i),
+                }),
+                Filter::DueOnDay(i) => Node::Search(SearchNode::Property {
+                    operator: "=".to_string(),
+                    kind: PropertyKind::Due(i),
+                }),
+                Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
+                Filter::CardState(state) => Node::Search(SearchNode::State(
+                    pb::search_node::CardState::from_i32(state)
+                        .unwrap_or_default()
+                        .into(),
+                )),
+                Filter::Flag(flag) => match Flag::from_i32(flag).unwrap_or(Flag::Any) {
+                    Flag::None => Node::Search(SearchNode::Flag(0)),
+                    Flag::Any => Node::Not(Box::new(Node::Search(SearchNode::Flag(0)))),
+                    Flag::Red => Node::Search(SearchNode::Flag(1)),
+                    Flag::Orange => Node::Search(SearchNode::Flag(2)),
+                    Flag::Green => Node::Search(SearchNode::Flag(3)),
+                    Flag::Blue => Node::Search(SearchNode::Flag(4)),
+                },
+                Filter::Negated(term) => Node::try_from(*term)?.negated(),
+                Filter::Group(mut group) => {
+                    match group.nodes.len() {
+                        0 => return Err(AnkiError::invalid_input("empty group")),
+                        // a group of 1 doesn't need to be a group
+                        1 => group.nodes.pop().unwrap().try_into()?,
+                        // 2+ nodes
+                        _ => {
+                            let joiner = match group.joiner() {
+                                Joiner::And => Node::And,
+                                Joiner::Or => Node::Or,
+                            };
+                            let parsed: Vec<_> = group
+                                .nodes
+                                .into_iter()
+                                .map(TryFrom::try_from)
+                                .collect::<Result<_>>()?;
+                            let joined = parsed.into_iter().intersperse(joiner).collect();
+                            Node::Group(joined)
+                        }
+                    }
+                }
+                Filter::ParsableText(text) => {
+                    let mut nodes = parse_search(&text)?;
+                    if nodes.len() == 1 {
+                        nodes.pop().unwrap()
+                    } else {
+                        Node::Group(nodes)
+                    }
+                }
+            }
+        } else {
+            Node::Search(SearchNode::WholeCollection)
+        })
+    }
+}
+
+impl From<pb::search_node::group::Joiner> for BoolSeparator {
+    fn from(sep: pb::search_node::group::Joiner) -> Self {
+        match sep {
+            pb::search_node::group::Joiner::And => BoolSeparator::And,
+            pb::search_node::group::Joiner::Or => BoolSeparator::Or,
+        }
+    }
+}
+
+impl From<pb::search_node::Rating> for RatingKind {
+    fn from(r: pb::search_node::Rating) -> Self {
+        match r {
+            pb::search_node::Rating::Again => RatingKind::AnswerButton(1),
+            pb::search_node::Rating::Hard => RatingKind::AnswerButton(2),
+            pb::search_node::Rating::Good => RatingKind::AnswerButton(3),
+            pb::search_node::Rating::Easy => RatingKind::AnswerButton(4),
+            pb::search_node::Rating::Any => RatingKind::AnyAnswerButton,
+            pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
+        }
+    }
+}
+
+impl From<pb::search_node::CardState> for StateKind {
+    fn from(k: pb::search_node::CardState) -> Self {
+        match k {
+            pb::search_node::CardState::New => StateKind::New,
+            pb::search_node::CardState::Learn => StateKind::Learning,
+            pb::search_node::CardState::Review => StateKind::Review,
+            pb::search_node::CardState::Due => StateKind::Due,
+            pb::search_node::CardState::Suspended => StateKind::Suspended,
+            pb::search_node::CardState::Buried => StateKind::Buried,
+        }
+    }
+}
+
+impl pb::search_node::IdList {
+    fn into_id_string(self) -> String {
+        self.ids
+            .iter()
+            .map(|i| i.to_string())
+            .collect::<Vec<_>>()
+            .join(",")
+    }
+}
+
+impl From<SortKindProto> for SortKind {
+    fn from(kind: SortKindProto) -> Self {
+        match kind {
+            SortKindProto::NoteCreation => SortKind::NoteCreation,
+            SortKindProto::NoteMod => SortKind::NoteMod,
+            SortKindProto::NoteField => SortKind::NoteField,
+            SortKindProto::NoteTags => SortKind::NoteTags,
+            SortKindProto::NoteType => SortKind::NoteType,
+            SortKindProto::CardMod => SortKind::CardMod,
+            SortKindProto::CardReps => SortKind::CardReps,
+            SortKindProto::CardDue => SortKind::CardDue,
+            SortKindProto::CardEase => SortKind::CardEase,
+            SortKindProto::CardLapses => SortKind::CardLapses,
+            SortKindProto::CardInterval => SortKind::CardInterval,
+            SortKindProto::CardDeck => SortKind::CardDeck,
+            SortKindProto::CardTemplate => SortKind::CardTemplate,
+        }
+    }
+}
+
+impl From<Option<SortOrderProto>> for SortMode {
+    fn from(order: Option<SortOrderProto>) -> Self {
+        use pb::sort_order::Value as V;
+        match order.unwrap_or(V::FromConfig(pb::Empty {})) {
+            V::None(_) => SortMode::NoOrder,
+            V::Custom(s) => SortMode::Custom(s),
+            V::FromConfig(_) => SortMode::FromConfig,
+            V::Builtin(b) => SortMode::Builtin {
+                kind: b.kind().into(),
+                reverse: b.reverse,
+            },
+        }
+    }
+}
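A small sketch of how the conversions above are driven, not part of this commit: the client sends a pb::SearchNode, and the TryFrom impl turns it into a crate::search::Node that the rest of the backend can combine or serialize. The helper function and the ..Default::default() spread are illustrative assumptions about the generated protobuf struct.

    use std::convert::TryInto;

    // Hypothetical helper: convert a single deck filter into a search Node.
    fn deck_filter_sketch() -> Result<Node> {
        let msg = pb::SearchNode {
            filter: Some(pb::search_node::Filter::Deck("My Deck".into())),
            // any other generated fields left at their defaults (assumption)
            ..Default::default()
        };
        // The TryFrom impl above escapes wildcards and maps this onto
        // Node::Search(SearchNode::Deck(..)); a missing filter would become
        // SearchNode::WholeCollection instead.
        msg.try_into()
    }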