diff --git a/Cargo.lock b/Cargo.lock index a7bc7596d..554df33f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -130,7 +130,6 @@ dependencies = [ "scopeguard", "serde", "serde-aux", - "serde_derive", "serde_json", "serde_repr", "serde_tuple", @@ -191,9 +190,14 @@ version = "0.0.0" dependencies = [ "anyhow", "inflections", + "num_enum", + "prost", "prost-build", "prost-reflect", "prost-types", + "serde", + "snafu", + "strum", ] [[package]] @@ -2497,23 +2501,23 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.5.11" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.5.11" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.12", ] [[package]] @@ -3557,9 +3561,9 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.159" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" dependencies = [ "serde_derive", ] @@ -3577,9 +3581,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.159" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", diff --git a/proto/anki/cards.proto b/proto/anki/cards.proto index afb196b38..a7c6400ea 100644 --- a/proto/anki/cards.proto +++ b/proto/anki/cards.proto @@ -17,6 +17,7 @@ service CardsService { rpc SetDeck(SetDeckRequest) returns (collection.OpChangesWithCount); rpc SetFlag(SetFlagRequest) returns (collection.OpChangesWithCount); } + message CardId { int64 cid = 1; } diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml index 4c7fc20f7..6e9d73c14 100644 --- a/rslib/Cargo.toml +++ b/rslib/Cargo.toml @@ -80,7 +80,7 @@ lazy_static = "1.4.0" nom = "7.1.3" num-integer = "0.1.45" num_cpus = "1.15.0" -num_enum = "0.5.11" +num_enum = "0.6.1" once_cell = "1.17.1" pin-project = "1.0.12" prost = "0.11.8" @@ -89,9 +89,8 @@ rand = "0.8.5" regex = "1.7.3" rusqlite = { version = "0.29.0", features = ["trace", "functions", "collation", "bundled"] } scopeguard = "1.1.0" -serde = "1.0.159" +serde = { version = "1.0.159", features = ["derive"] } serde-aux = "4.1.2" -serde_derive = "1.0.159" serde_json = "1.0.95" serde_repr = "0.1.12" serde_tuple = "0.5.0" diff --git a/rslib/linkchecker/tests/links.rs b/rslib/linkchecker/tests/links.rs index 0b0d1001e..f190de7e1 100644 --- a/rslib/linkchecker/tests/links.rs +++ b/rslib/linkchecker/tests/links.rs @@ -7,6 +7,8 @@ use std::borrow::Cow; use std::env; use std::iter; +use anki::links::help_page_link_suffix; +use anki::links::help_page_to_link; use anki::links::HelpPage; use futures::StreamExt; use itertools::Itertools; @@ -38,14 +40,14 @@ enum CheckableUrl { impl CheckableUrl { fn 
url(&self) -> Cow { match *self { - Self::HelpPage(page) => page.to_link().into(), + Self::HelpPage(page) => help_page_to_link(page).into(), Self::String(s) => s.into(), } } fn anchor(&self) -> Cow { match *self { - Self::HelpPage(page) => page.to_link_suffix().into(), + Self::HelpPage(page) => help_page_link_suffix(page).into(), Self::String(s) => s.split('#').last().unwrap_or_default().into(), } } diff --git a/rslib/proto/Cargo.toml b/rslib/proto/Cargo.toml index c34d4c191..c3109e19e 100644 --- a/rslib/proto/Cargo.toml +++ b/rslib/proto/Cargo.toml @@ -15,3 +15,10 @@ inflections = "1.1.1" prost-build = "0.11.9" prost-reflect = "0.11.4" prost-types = "0.11.9" + +[dependencies] +num_enum = "0.6.1" +prost = "0.11.9" +serde = { version = "1.0.164", features = ["derive"] } +snafu = "0.7.4" +strum = { version = "0.24.1", features = ["derive"] } diff --git a/rslib/proto/rust.rs b/rslib/proto/rust.rs index bdd3b4c8c..6762a9a4c 100644 --- a/rslib/proto/rust.rs +++ b/rslib/proto/rust.rs @@ -16,11 +16,16 @@ pub fn write_backend_proto_rs(descriptors_path: &Path) -> Result set_protoc_path(); let proto_dir = PathBuf::from("../../proto"); let paths = gather_proto_paths(&proto_dir)?; - let out_dir = Path::new("../../out/rslib/proto"); - fs::create_dir_all(out_dir).with_context(|| format!("{:?}", out_dir))?; + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + fs::create_dir_all( + descriptors_path + .parent() + .context("no parent found for descriptors path")?, + ) + .with_context(|| format!("creating {descriptors_path:?}"))?; prost_build::Config::new() - .out_dir(out_dir) + .out_dir(&out_dir) .file_descriptor_set_path(descriptors_path) .service_generator(RustCodeGenerator::boxed()) .type_attribute( @@ -29,26 +34,26 @@ pub fn write_backend_proto_rs(descriptors_path: &Path) -> Result ) .type_attribute( "Deck.Normal.DayLimit", - "#[derive(Copy, Eq, serde_derive::Deserialize, serde_derive::Serialize)]", + "#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]", ) .type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]") .type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]") .type_attribute( "Preferences.BackupLimits", - "#[derive(Copy, serde_derive::Deserialize, serde_derive::Serialize)]", + "#[derive(Copy, serde::Deserialize, serde::Serialize)]", ) .type_attribute( "CsvMetadata.DupeResolution", - "#[derive(serde_derive::Deserialize, serde_derive::Serialize)]", + "#[derive(serde::Deserialize, serde::Serialize)]", ) .type_attribute( "CsvMetadata.MatchScope", - "#[derive(serde_derive::Deserialize, serde_derive::Serialize)]", + "#[derive(serde::Deserialize, serde::Serialize)]", ) .compile_protos(paths.as_slice(), &[proto_dir]) .context("prost build")?; - write_service_index(out_dir, descriptors_path) + write_service_index(&out_dir, descriptors_path) } fn write_service_index(out_dir: &Path, descriptors_path: &Path) -> Result { @@ -115,7 +120,9 @@ impl RustCodeGenerator { buf.push_str( r#" pub trait Service { - fn run_method(&self, method: u32, input: &[u8]) -> Result> { + type Error: From; + + fn run_method(&self, method: u32, input: &[u8]) -> Result, Self::Error> { match method { "#, ); @@ -123,9 +130,9 @@ pub trait Service { write!( buf, concat!(" ", - "{idx} => {{ let input = super::{input_type}::decode(input)?;\n", + "{idx} => {{ let input = super::{input_type}::decode(input).map_err(crate::ProtoError::from)?;\n", "let output = self.{rust_method}(input)?;\n", - "let mut out_bytes = Vec::new(); output.encode(&mut out_bytes)?; Ok(out_bytes) }}, "), + "let mut 
out_bytes = Vec::new(); output.encode(&mut out_bytes).map_err(crate::ProtoError::from)?; Ok(out_bytes) }}, "), idx = idx, input_type = method.input_type, rust_method = method.name @@ -134,7 +141,7 @@ pub trait Service { } buf.push_str( r#" - _ => crate::invalid_input!("invalid command"), + _ => Err(crate::ProtoError::InvalidMethodIndex.into()), } } "#, @@ -145,7 +152,7 @@ pub trait Service { buf, concat!( " fn {method_name}(&self, input: super::{input_type}) -> ", - "Result;\n" + "Result;\n" ), method_name = method.name, input_type = method.input_type, @@ -163,7 +170,6 @@ impl ServiceGenerator for RustCodeGenerator { buf, "pub mod {name}_service {{ use prost::Message; - use crate::error::Result; ", name = service.name.replace("Service", "").to_ascii_lowercase() ) diff --git a/rslib/proto/src/generic_helpers.rs b/rslib/proto/src/generic_helpers.rs new file mode 100644 index 000000000..ad1955b63 --- /dev/null +++ b/rslib/proto/src/generic_helpers.rs @@ -0,0 +1,56 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +impl From> for crate::generic::Json { + fn from(json: Vec) -> Self { + crate::generic::Json { json } + } +} + +impl From for crate::generic::String { + fn from(val: String) -> Self { + crate::generic::String { val } + } +} + +impl From> for crate::generic::StringList { + fn from(vals: Vec) -> Self { + crate::generic::StringList { vals } + } +} + +impl From for crate::generic::Bool { + fn from(val: bool) -> Self { + crate::generic::Bool { val } + } +} + +impl From for crate::generic::Int32 { + fn from(val: i32) -> Self { + crate::generic::Int32 { val } + } +} + +impl From for crate::generic::Int64 { + fn from(val: i64) -> Self { + crate::generic::Int64 { val } + } +} + +impl From for crate::generic::UInt32 { + fn from(val: u32) -> Self { + crate::generic::UInt32 { val } + } +} + +impl From for crate::generic::UInt32 { + fn from(val: usize) -> Self { + crate::generic::UInt32 { val: val as u32 } + } +} + +impl From<()> for crate::generic::Empty { + fn from(_val: ()) -> Self { + crate::generic::Empty {} + } +} diff --git a/rslib/proto/src/lib.rs b/rslib/proto/src/lib.rs index cf499d047..ac2bc8d60 100644 --- a/rslib/proto/src/lib.rs +++ b/rslib/proto/src/lib.rs @@ -1,2 +1,51 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +mod generic_helpers; + +macro_rules! 
protobuf { + ($ident:ident, $name:literal) => { + pub mod $ident { + include!(concat!(env!("OUT_DIR"), "/anki.", $name, ".rs")); + } + }; +} + +use snafu::Snafu; + +#[derive(Debug, Snafu)] +pub enum ProtoError { + InvalidMethodIndex, + #[snafu(context(false))] + DecodeError { + source: prost::DecodeError, + }, + #[snafu(context(false))] + EncodeError { + source: prost::EncodeError, + }, +} + +include!(concat!(env!("OUT_DIR"), "/service_index.rs")); + +protobuf!(ankidroid, "ankidroid"); +protobuf!(backend, "backend"); +protobuf!(card_rendering, "card_rendering"); +protobuf!(cards, "cards"); +protobuf!(collection, "collection"); +protobuf!(config, "config"); +protobuf!(deckconfig, "deckconfig"); +protobuf!(decks, "decks"); +protobuf!(generic, "generic"); +protobuf!(i18n, "i18n"); +protobuf!(image_occlusion, "image_occlusion"); +protobuf!(import_export, "import_export"); +protobuf!(links, "links"); +protobuf!(media, "media"); +protobuf!(notes, "notes"); +protobuf!(notetypes, "notetypes"); +protobuf!(scheduler, "scheduler"); +protobuf!(search, "search"); +protobuf!(stats, "stats"); +protobuf!(sync, "sync"); +protobuf!(tags, "tags"); diff --git a/rslib/src/backend/adding.rs b/rslib/src/backend/adding.rs index b0f8da28f..b041b3189 100644 --- a/rslib/src/backend/adding.rs +++ b/rslib/src/backend/adding.rs @@ -1,8 +1,9 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::notes::DeckAndNotetype as DeckAndNotetypeProto; + use crate::adding::DeckAndNotetype; -use crate::pb::notes::DeckAndNotetype as DeckAndNotetypeProto; impl From for DeckAndNotetypeProto { fn from(s: DeckAndNotetype) -> Self { diff --git a/rslib/src/backend/ankidroid/db.rs b/rslib/src/backend/ankidroid/db.rs index ca73d674b..67e4f4737 100644 --- a/rslib/src/backend/ankidroid/db.rs +++ b/rslib/src/backend/ankidroid/db.rs @@ -7,21 +7,21 @@ use std::sync::atomic::AtomicI32; use std::sync::atomic::Ordering; use std::sync::Mutex; +use anki_proto::ankidroid::sql_value::Data; +use anki_proto::ankidroid::DbResponse; +use anki_proto::ankidroid::DbResult; +use anki_proto::ankidroid::Row; +use anki_proto::ankidroid::SqlValue; use itertools::FoldWhile; use itertools::FoldWhile::Continue; use itertools::FoldWhile::Done; use itertools::Itertools; use lazy_static::lazy_static; use rusqlite::ToSql; -use serde_derive::Deserialize; +use serde::Deserialize; use crate::collection::Collection; use crate::error::Result; -use crate::pb::ankidroid::sql_value::Data; -use crate::pb::ankidroid::DbResponse; -use crate::pb::ankidroid::DbResult; -use crate::pb::ankidroid::Row; -use crate::pb::ankidroid::SqlValue; /// A pointer to the SqliteStorage object stored in a collection, used to /// uniquely index results from multiple open collections at once. 
@@ -279,12 +279,13 @@ pub(crate) fn execute_for_row_count(col: &Collection, req: &[u8]) -> Result #[cfg(test)] mod tests { + use anki_proto::ankidroid::sql_value; + use anki_proto::ankidroid::Row; + use anki_proto::ankidroid::SqlValue; + use super::*; use crate::backend::ankidroid::db::select_slice_of_size; use crate::backend::ankidroid::db::Sizable; - use crate::pb::ankidroid::sql_value; - use crate::pb::ankidroid::Row; - use crate::pb::ankidroid::SqlValue; fn gen_data() -> Vec { vec![ diff --git a/rslib/src/backend/ankidroid/mod.rs b/rslib/src/backend/ankidroid/mod.rs index 72e52681e..1a4df3bfe 100644 --- a/rslib/src/backend/ankidroid/mod.rs +++ b/rslib/src/backend/ankidroid/mod.rs @@ -4,6 +4,12 @@ pub(crate) mod db; pub(crate) mod error; +pub(super) use anki_proto::ankidroid::ankidroid_service::Service as AnkidroidService; +use anki_proto::ankidroid::DbResponse; +use anki_proto::ankidroid::GetActiveSequenceNumbersResponse; +use anki_proto::ankidroid::GetNextResultPageRequest; +use anki_proto::generic; + use self::db::active_sequences; use self::error::debug_produce_error; use super::dbproxy::db_command_bytes; @@ -11,24 +17,17 @@ use super::dbproxy::db_command_proto; use super::Backend; use crate::backend::ankidroid::db::execute_for_row_count; use crate::backend::ankidroid::db::insert_for_id; -use crate::pb; -pub(super) use crate::pb::ankidroid::ankidroid_service::Service as AnkidroidService; -use crate::pb::ankidroid::DbResponse; -use crate::pb::ankidroid::GetActiveSequenceNumbersResponse; -use crate::pb::ankidroid::GetNextResultPageRequest; -use crate::pb::generic; -use crate::pb::generic::Empty; -use crate::pb::generic::Int32; -use crate::pb::generic::Json; use crate::prelude::*; use crate::scheduler::timing; use crate::scheduler::timing::fixed_offset_from_minutes; impl AnkidroidService for Backend { + type Error = AnkiError; + fn sched_timing_today_legacy( &self, - input: pb::ankidroid::SchedTimingTodayLegacyRequest, - ) -> Result { + input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest, + ) -> Result { let result = timing::sched_timing_today( TimestampSecs::from(input.created_secs), TimestampSecs::from(input.now_secs), @@ -36,40 +35,42 @@ impl AnkidroidService for Backend { fixed_offset_from_minutes(input.now_mins_west), Some(input.rollover_hour as u8), )?; - Ok(pb::scheduler::SchedTimingTodayResponse::from(result)) + Ok(anki_proto::scheduler::SchedTimingTodayResponse::from( + result, + )) } - fn local_minutes_west_legacy(&self, input: pb::generic::Int64) -> Result { - Ok(pb::generic::Int32 { + fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result { + Ok(generic::Int32 { val: timing::local_minutes_west_for_stamp(input.val.into())?, }) } - fn run_db_command(&self, input: Json) -> Result { + fn run_db_command(&self, input: generic::Json) -> Result { self.with_col(|col| db_command_bytes(col, &input.json)) - .map(|json| Json { json }) + .map(|json| generic::Json { json }) } - fn run_db_command_proto(&self, input: Json) -> Result { + fn run_db_command_proto(&self, input: generic::Json) -> Result { self.with_col(|col| db_command_proto(col, &input.json)) } - fn run_db_command_for_row_count(&self, input: Json) -> Result { + fn run_db_command_for_row_count(&self, input: generic::Json) -> Result { self.with_col(|col| execute_for_row_count(col, &input.json)) - .map(|val| pb::generic::Int64 { val }) + .map(|val| generic::Int64 { val }) } - fn flush_all_queries(&self, _input: Empty) -> Result { + fn flush_all_queries(&self, _input: generic::Empty) -> Result { 
self.with_col(|col| { db::flush_collection(col); - Ok(Empty {}) + Ok(generic::Empty {}) }) } - fn flush_query(&self, input: Int32) -> Result { + fn flush_query(&self, input: generic::Int32) -> Result { self.with_col(|col| { db::flush_single_result(col, input.val); - Ok(Empty {}) + Ok(generic::Empty {}) }) } @@ -79,11 +80,11 @@ impl AnkidroidService for Backend { }) } - fn insert_for_id(&self, input: Json) -> Result { + fn insert_for_id(&self, input: generic::Json) -> Result { self.with_col(|col| insert_for_id(col, &input.json).map(Into::into)) } - fn set_page_size(&self, input: pb::generic::Int64) -> Result { + fn set_page_size(&self, input: generic::Int64) -> Result { // we don't require an open collection, but should avoid modifying this // concurrently let _guard = self.col.lock(); @@ -91,10 +92,7 @@ impl AnkidroidService for Backend { Ok(().into()) } - fn get_column_names_from_query( - &self, - input: generic::String, - ) -> Result { + fn get_column_names_from_query(&self, input: generic::String) -> Result { self.with_col(|col| { let stmt = col.storage.db.prepare(&input.val)?; let names = stmt.column_names(); @@ -105,7 +103,7 @@ impl AnkidroidService for Backend { fn get_active_sequence_numbers( &self, - _input: Empty, + _input: generic::Empty, ) -> Result { self.with_col(|col| { Ok(GetActiveSequenceNumbersResponse { @@ -114,7 +112,20 @@ impl AnkidroidService for Backend { }) } - fn debug_produce_error(&self, input: generic::String) -> Result { + fn debug_produce_error(&self, input: generic::String) -> Result { Err(debug_produce_error(&input.val)) } } + +impl From + for anki_proto::scheduler::SchedTimingTodayResponse +{ + fn from( + t: crate::scheduler::timing::SchedTimingToday, + ) -> anki_proto::scheduler::SchedTimingTodayResponse { + anki_proto::scheduler::SchedTimingTodayResponse { + days_elapsed: t.days_elapsed, + next_day_at: t.next_day_at.0, + } + } +} diff --git a/rslib/src/backend/card.rs b/rslib/src/backend/card.rs index a8adbcc0b..84e849418 100644 --- a/rslib/src/backend/card.rs +++ b/rslib/src/backend/card.rs @@ -1,15 +1,18 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(super) use anki_proto::cards::cards_service::Service as CardsService; +use anki_proto::generic; + use super::Backend; use crate::card::CardQueue; use crate::card::CardType; -use crate::pb; -pub(super) use crate::pb::cards::cards_service::Service as CardsService; use crate::prelude::*; impl CardsService for Backend { - fn get_card(&self, input: pb::cards::CardId) -> Result { + type Error = AnkiError; + + fn get_card(&self, input: anki_proto::cards::CardId) -> Result { let cid = input.into(); self.with_col(|col| { col.storage @@ -21,8 +24,8 @@ impl CardsService for Backend { fn update_cards( &self, - input: pb::cards::UpdateCardsRequest, - ) -> Result { + input: anki_proto::cards::UpdateCardsRequest, + ) -> Result { self.with_col(|col| { let cards = input .cards @@ -37,7 +40,7 @@ impl CardsService for Backend { .map(Into::into) } - fn remove_cards(&self, input: pb::cards::RemoveCardsRequest) -> Result { + fn remove_cards(&self, input: anki_proto::cards::RemoveCardsRequest) -> Result { self.with_col(|col| { col.transact_no_undo(|col| { col.remove_cards_and_orphaned_notes( @@ -54,8 +57,8 @@ impl CardsService for Backend { fn set_deck( &self, - input: pb::cards::SetDeckRequest, - ) -> Result { + input: anki_proto::cards::SetDeckRequest, + ) -> Result { let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect(); let 
deck_id = input.deck_id.into(); self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into)) @@ -63,8 +66,8 @@ impl CardsService for Backend { fn set_flag( &self, - input: pb::cards::SetFlagRequest, - ) -> Result { + input: anki_proto::cards::SetFlagRequest, + ) -> Result { self.with_col(|col| { col.set_card_flag(&to_card_ids(input.card_ids), input.flag) .map(Into::into) @@ -72,10 +75,10 @@ impl CardsService for Backend { } } -impl TryFrom for Card { +impl TryFrom for Card { type Error = AnkiError; - fn try_from(c: pb::cards::Card) -> Result { + fn try_from(c: anki_proto::cards::Card) -> Result { let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?; let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?; Ok(Card { @@ -102,9 +105,9 @@ impl TryFrom for Card { } } -impl From for pb::cards::Card { +impl From for anki_proto::cards::Card { fn from(c: Card) -> Self { - pb::cards::Card { + anki_proto::cards::Card { id: c.id.0, note_id: c.note_id.0, deck_id: c.deck_id.0, @@ -131,3 +134,9 @@ impl From for pb::cards::Card { fn to_card_ids(v: Vec) -> Vec { v.into_iter().map(CardId).collect() } + +impl From for CardId { + fn from(cid: anki_proto::cards::CardId) -> Self { + CardId(cid.cid) + } +} diff --git a/rslib/src/backend/cardrendering.rs b/rslib/src/backend/cardrendering.rs index b947f824d..d249a3081 100644 --- a/rslib/src/backend/cardrendering.rs +++ b/rslib/src/backend/cardrendering.rs @@ -1,6 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(super) use anki_proto::card_rendering::cardrendering_service::Service as CardRenderingService; +use anki_proto::card_rendering::ExtractClozeForTypingRequest; +use anki_proto::generic; + use super::Backend; use crate::card_rendering::extract_av_tags; use crate::card_rendering::strip_av_tags; @@ -12,9 +16,6 @@ use crate::latex::ExtractedLatex; use crate::markdown::render_markdown; use crate::notetype::CardTemplateSchema11; use crate::notetype::RenderCardOutput; -use crate::pb; -pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService; -use crate::pb::card_rendering::ExtractClozeForTypingRequest; use crate::prelude::*; use crate::template::RenderedNode; use crate::text::decode_iri_paths; @@ -25,12 +26,14 @@ use crate::text::strip_html_preserving_media_filenames; use crate::typeanswer::compare_answer; impl CardRenderingService for Backend { + type Error = AnkiError; + fn extract_av_tags( &self, - input: pb::card_rendering::ExtractAvTagsRequest, - ) -> Result { + input: anki_proto::card_rendering::ExtractAvTagsRequest, + ) -> Result { let out = extract_av_tags(input.text, input.question_side, self.i18n()); - Ok(pb::card_rendering::ExtractAvTagsResponse { + Ok(anki_proto::card_rendering::ExtractAvTagsResponse { text: out.0, av_tags: out.1, }) @@ -38,8 +41,8 @@ impl CardRenderingService for Backend { fn extract_latex( &self, - input: pb::card_rendering::ExtractLatexRequest, - ) -> Result { + input: anki_proto::card_rendering::ExtractLatexRequest, + ) -> Result { let func = if input.expand_clozes { extract_latex_expanding_clozes } else { @@ -47,22 +50,24 @@ impl CardRenderingService for Backend { }; let (text, extracted) = func(&input.text, input.svg); - Ok(pb::card_rendering::ExtractLatexResponse { + Ok(anki_proto::card_rendering::ExtractLatexResponse { text, latex: extracted .into_iter() - .map(|e: ExtractedLatex| pb::card_rendering::ExtractedLatex { - filename: e.fname, - 
latex_body: e.latex, - }) + .map( + |e: ExtractedLatex| anki_proto::card_rendering::ExtractedLatex { + filename: e.fname, + latex_body: e.latex, + }, + ) .collect(), }) } fn get_empty_cards( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| { let mut empty = col.empty_cards()?; let report = col.empty_cards_report(&mut empty)?; @@ -70,14 +75,14 @@ impl CardRenderingService for Backend { let mut outnotes = vec![]; for (_ntid, notes) in empty { outnotes.extend(notes.into_iter().map(|e| { - pb::card_rendering::empty_cards_report::NoteWithEmptyCards { + anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards { note_id: e.nid.0, will_delete_note: e.empty.len() == e.current_count, card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(), } })) } - Ok(pb::card_rendering::EmptyCardsReport { + Ok(anki_proto::card_rendering::EmptyCardsReport { report, notes: outnotes, }) @@ -86,8 +91,8 @@ impl CardRenderingService for Backend { fn render_existing_card( &self, - input: pb::card_rendering::RenderExistingCardRequest, - ) -> Result { + input: anki_proto::card_rendering::RenderExistingCardRequest, + ) -> Result { self.with_col(|col| { col.render_existing_card(CardId(input.card_id), input.browser) .map(Into::into) @@ -96,8 +101,8 @@ impl CardRenderingService for Backend { fn render_uncommitted_card( &self, - input: pb::card_rendering::RenderUncommittedCardRequest, - ) -> Result { + input: anki_proto::card_rendering::RenderUncommittedCardRequest, + ) -> Result { let template = input.template.or_invalid("missing template")?.into(); let mut note = input.note.or_invalid("missing note")?.into(); let ord = input.card_ord as u16; @@ -110,8 +115,8 @@ impl CardRenderingService for Backend { fn render_uncommitted_card_legacy( &self, - input: pb::card_rendering::RenderUncommittedCardLegacyRequest, - ) -> Result { + input: anki_proto::card_rendering::RenderUncommittedCardLegacyRequest, + ) -> Result { let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?; let template = schema11.into(); let mut note = input.note.or_invalid("missing note")?.into(); @@ -123,14 +128,14 @@ impl CardRenderingService for Backend { }) } - fn strip_av_tags(&self, input: pb::generic::String) -> Result { + fn strip_av_tags(&self, input: generic::String) -> Result { Ok(strip_av_tags(input.val).into()) } fn render_markdown( &self, - input: pb::card_rendering::RenderMarkdownRequest, - ) -> Result { + input: anki_proto::card_rendering::RenderMarkdownRequest, + ) -> Result { let mut text = render_markdown(&input.markdown); if input.sanitize { // currently no images @@ -139,21 +144,21 @@ impl CardRenderingService for Backend { Ok(text.into()) } - fn encode_iri_paths(&self, input: pb::generic::String) -> Result { + fn encode_iri_paths(&self, input: generic::String) -> Result { Ok(encode_iri_paths(&input.val).to_string().into()) } - fn decode_iri_paths(&self, input: pb::generic::String) -> Result { + fn decode_iri_paths(&self, input: generic::String) -> Result { Ok(decode_iri_paths(&input.val).to_string().into()) } fn strip_html( &self, - input: pb::card_rendering::StripHtmlRequest, - ) -> Result { + input: anki_proto::card_rendering::StripHtmlRequest, + ) -> Result { Ok(match input.mode() { - pb::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text), - pb::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => { + anki_proto::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text), + 
anki_proto::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => { strip_html_preserving_media_filenames(&input.text) } } @@ -163,15 +168,15 @@ impl CardRenderingService for Backend { fn compare_answer( &self, - input: pb::card_rendering::CompareAnswerRequest, - ) -> Result { + input: anki_proto::card_rendering::CompareAnswerRequest, + ) -> Result { Ok(compare_answer(&input.expected, &input.provided).into()) } fn extract_cloze_for_typing( &self, input: ExtractClozeForTypingRequest, - ) -> Result { + ) -> Result { Ok(extract_cloze_for_typing(&input.text, input.ordinal as u16) .to_string() .into()) @@ -179,16 +184,16 @@ impl CardRenderingService for Backend { fn all_tts_voices( &self, - input: pb::card_rendering::AllTtsVoicesRequest, - ) -> Result { + input: anki_proto::card_rendering::AllTtsVoicesRequest, + ) -> Result { tts::all_voices(input.validate) - .map(|voices| pb::card_rendering::AllTtsVoicesResponse { voices }) + .map(|voices| anki_proto::card_rendering::AllTtsVoicesResponse { voices }) } fn write_tts_stream( &self, - request: pb::card_rendering::WriteTtsStreamRequest, - ) -> Result { + request: anki_proto::card_rendering::WriteTtsStreamRequest, + ) -> Result { tts::write_stream( &request.path, &request.voice_id, @@ -201,26 +206,28 @@ impl CardRenderingService for Backend { fn rendered_nodes_to_proto( nodes: Vec, -) -> Vec { +) -> Vec { nodes .into_iter() - .map(|n| pb::card_rendering::RenderedTemplateNode { + .map(|n| anki_proto::card_rendering::RenderedTemplateNode { value: Some(rendered_node_to_proto(n)), }) .collect() } -fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_template_node::Value { +fn rendered_node_to_proto( + node: RenderedNode, +) -> anki_proto::card_rendering::rendered_template_node::Value { match node { RenderedNode::Text { text } => { - pb::card_rendering::rendered_template_node::Value::Text(text) + anki_proto::card_rendering::rendered_template_node::Value::Text(text) } RenderedNode::Replacement { field_name, current_text, filters, - } => pb::card_rendering::rendered_template_node::Value::Replacement( - pb::card_rendering::RenderedTemplateReplacement { + } => anki_proto::card_rendering::rendered_template_node::Value::Replacement( + anki_proto::card_rendering::RenderedTemplateReplacement { field_name, current_text, filters, @@ -229,9 +236,9 @@ fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_te } } -impl From for pb::card_rendering::RenderCardResponse { +impl From for anki_proto::card_rendering::RenderCardResponse { fn from(o: RenderCardOutput) -> Self { - pb::card_rendering::RenderCardResponse { + anki_proto::card_rendering::RenderCardResponse { question_nodes: rendered_nodes_to_proto(o.qnodes), answer_nodes: rendered_nodes_to_proto(o.anodes), css: o.css, diff --git a/rslib/src/backend/collection.rs b/rslib/src/backend/collection.rs index 82d5ac818..9092f10b5 100644 --- a/rslib/src/backend/collection.rs +++ b/rslib/src/backend/collection.rs @@ -3,32 +3,34 @@ use std::sync::MutexGuard; +pub(super) use anki_proto::collection::collection_service::Service as CollectionService; +use anki_proto::generic; use tracing::error; use super::progress::Progress; use super::Backend; use crate::backend::progress::progress_to_proto; use crate::collection::CollectionBuilder; -use crate::pb; -pub(super) use crate::pb::collection::collection_service::Service as CollectionService; use crate::prelude::*; use crate::storage::SchemaVersion; impl CollectionService for Backend { - fn latest_progress(&self, _input: 
pb::generic::Empty) -> Result { + type Error = AnkiError; + + fn latest_progress(&self, _input: generic::Empty) -> Result { let progress = self.progress_state.lock().unwrap().last_progress; Ok(progress_to_proto(progress, &self.tr)) } - fn set_wants_abort(&self, _input: pb::generic::Empty) -> Result { + fn set_wants_abort(&self, _input: generic::Empty) -> Result { self.progress_state.lock().unwrap().want_abort = true; Ok(().into()) } fn open_collection( &self, - input: pb::collection::OpenCollectionRequest, - ) -> Result { + input: anki_proto::collection::OpenCollectionRequest, + ) -> Result { let mut guard = self.lock_closed_collection()?; let mut builder = CollectionBuilder::new(input.collection_path); @@ -45,8 +47,8 @@ impl CollectionService for Backend { fn close_collection( &self, - input: pb::collection::CloseCollectionRequest, - ) -> Result { + input: anki_proto::collection::CloseCollectionRequest, + ) -> Result { let desired_version = if input.downgrade_to_schema11 { Some(SchemaVersion::V11) } else { @@ -66,37 +68,44 @@ impl CollectionService for Backend { fn check_database( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress, throttle| { handler.update(Progress::DatabaseCheck(progress), throttle); }; self.with_col(|col| { - col.check_database(progress_fn) - .map(|problems| pb::collection::CheckDatabaseResponse { + col.check_database(progress_fn).map(|problems| { + anki_proto::collection::CheckDatabaseResponse { problems: problems.to_i18n_strings(&col.tr), - }) + } + }) }) } - fn get_undo_status(&self, _input: pb::generic::Empty) -> Result { + fn get_undo_status( + &self, + _input: generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr))) } - fn undo(&self, _input: pb::generic::Empty) -> Result { + fn undo(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr))) } - fn redo(&self, _input: pb::generic::Empty) -> Result { + fn redo(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr))) } - fn add_custom_undo_entry(&self, input: pb::generic::String) -> Result { + fn add_custom_undo_entry(&self, input: generic::String) -> Result { self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into())) } - fn merge_undo_entries(&self, input: pb::generic::UInt32) -> Result { + fn merge_undo_entries( + &self, + input: generic::UInt32, + ) -> Result { let starting_from = input.val as usize; self.with_col(|col| col.merge_undoable_ops(starting_from)) .map(Into::into) @@ -104,8 +113,8 @@ impl CollectionService for Backend { fn create_backup( &self, - input: pb::collection::CreateBackupRequest, - ) -> Result { + input: anki_proto::collection::CreateBackupRequest, + ) -> Result { // lock collection let mut col_lock = self.lock_open_collection()?; let col = col_lock.as_mut().unwrap(); @@ -129,7 +138,7 @@ impl CollectionService for Backend { Ok(created.into()) } - fn await_backup_completion(&self, _input: pb::generic::Empty) -> Result { + fn await_backup_completion(&self, _input: generic::Empty) -> Result { self.await_backup_completion()?; Ok(().into()) } diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index f041a0043..b8e611e71 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -1,15 +1,15 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or 
later; http://www.gnu.org/licenses/agpl.html +use anki_proto::config::config_key::Bool as BoolKeyProto; +use anki_proto::config::config_key::String as StringKeyProto; +pub(super) use anki_proto::config::config_service::Service as ConfigService; +use anki_proto::generic; use serde_json::Value; use super::Backend; use crate::config::BoolKey; use crate::config::StringKey; -use crate::pb; -use crate::pb::config::config_key::Bool as BoolKeyProto; -use crate::pb::config::config_key::String as StringKeyProto; -pub(super) use crate::pb::config::config_service::Service as ConfigService; use crate::prelude::*; impl From for BoolKey { @@ -54,7 +54,9 @@ impl From for StringKey { } impl ConfigService for Backend { - fn get_config_json(&self, input: pb::generic::String) -> Result { + type Error = AnkiError; + + fn get_config_json(&self, input: generic::String) -> Result { self.with_col(|col| { let val: Option = col.get_config_optional(input.val.as_str()); val.or_not_found(input.val) @@ -65,8 +67,8 @@ impl ConfigService for Backend { fn set_config_json( &self, - input: pb::config::SetConfigJsonRequest, - ) -> Result { + input: anki_proto::config::SetConfigJsonRequest, + ) -> Result { self.with_col(|col| { let val: Value = serde_json::from_slice(&input.value_json)?; col.set_config_json(input.key.as_str(), &val, input.undoable) @@ -76,8 +78,8 @@ impl ConfigService for Backend { fn set_config_json_no_undo( &self, - input: pb::config::SetConfigJsonRequest, - ) -> Result { + input: anki_proto::config::SetConfigJsonRequest, + ) -> Result { self.with_col(|col| { let val: Value = serde_json::from_slice(&input.value_json)?; col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ())) @@ -85,12 +87,12 @@ impl ConfigService for Backend { .map(Into::into) } - fn remove_config(&self, input: pb::generic::String) -> Result { + fn remove_config(&self, input: generic::String) -> Result { self.with_col(|col| col.remove_config(input.val.as_str())) .map(Into::into) } - fn get_all_config(&self, _input: pb::generic::Empty) -> Result { + fn get_all_config(&self, _input: generic::Empty) -> Result { self.with_col(|col| { let conf = col.storage.get_all_config()?; serde_json::to_vec(&conf).map_err(Into::into) @@ -100,10 +102,10 @@ impl ConfigService for Backend { fn get_config_bool( &self, - input: pb::config::GetConfigBoolRequest, - ) -> Result { + input: anki_proto::config::GetConfigBoolRequest, + ) -> Result { self.with_col(|col| { - Ok(pb::generic::Bool { + Ok(generic::Bool { val: col.get_config_bool(input.key().into()), }) }) @@ -111,18 +113,18 @@ impl ConfigService for Backend { fn set_config_bool( &self, - input: pb::config::SetConfigBoolRequest, - ) -> Result { + input: anki_proto::config::SetConfigBoolRequest, + ) -> Result { self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable)) .map(Into::into) } fn get_config_string( &self, - input: pb::config::GetConfigStringRequest, - ) -> Result { + input: anki_proto::config::GetConfigStringRequest, + ) -> Result { self.with_col(|col| { - Ok(pb::generic::String { + Ok(generic::String { val: col.get_config_string(input.key().into()), }) }) @@ -130,17 +132,20 @@ impl ConfigService for Backend { fn set_config_string( &self, - input: pb::config::SetConfigStringRequest, - ) -> Result { + input: anki_proto::config::SetConfigStringRequest, + ) -> Result { self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable)) .map(Into::into) } - fn get_preferences(&self, _input: pb::generic::Empty) -> Result { + 
fn get_preferences(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.get_preferences()) } - fn set_preferences(&self, input: pb::config::Preferences) -> Result { + fn set_preferences( + &self, + input: anki_proto::config::Preferences, + ) -> Result { self.with_col(|col| col.set_preferences(input)) .map(Into::into) } diff --git a/rslib/src/backend/dbproxy.rs b/rslib/src/backend/dbproxy.rs index a977bad7f..e9283672b 100644 --- a/rslib/src/backend/dbproxy.rs +++ b/rslib/src/backend/dbproxy.rs @@ -1,6 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::ankidroid::sql_value::Data; +use anki_proto::ankidroid::DbResponse; +use anki_proto::ankidroid::DbResult as ProtoDbResult; +use anki_proto::ankidroid::SqlValue as pb_SqlValue; use rusqlite::params_from_iter; use rusqlite::types::FromSql; use rusqlite::types::FromSqlError; @@ -8,15 +12,9 @@ use rusqlite::types::ToSql; use rusqlite::types::ToSqlOutput; use rusqlite::types::ValueRef; use rusqlite::OptionalExtension; -use serde_derive::Deserialize; -use serde_derive::Serialize; +use serde::Deserialize; +use serde::Serialize; -use crate::pb; -use crate::pb::ankidroid::sql_value::Data; -use crate::pb::ankidroid::DbResponse; -use crate::pb::ankidroid::DbResult as ProtoDbResult; -use crate::pb::ankidroid::Row; -use crate::pb::ankidroid::SqlValue as pb_SqlValue; use crate::prelude::*; use crate::storage::SqliteStorage; @@ -67,7 +65,7 @@ impl ToSql for SqlValue { } } -impl From<&SqlValue> for pb::ankidroid::SqlValue { +impl From<&SqlValue> for anki_proto::ankidroid::SqlValue { fn from(item: &SqlValue) -> Self { match item { SqlValue::Null => pb_SqlValue { data: Option::None }, @@ -87,19 +85,18 @@ impl From<&SqlValue> for pb::ankidroid::SqlValue { } } -impl From<&Vec> for pb::ankidroid::Row { - fn from(item: &Vec) -> Self { - Row { - fields: item.iter().map(pb::ankidroid::SqlValue::from).collect(), - } +fn row_to_proto(row: &[SqlValue]) -> anki_proto::ankidroid::Row { + anki_proto::ankidroid::Row { + fields: row + .iter() + .map(anki_proto::ankidroid::SqlValue::from) + .collect(), } } -impl From<&Vec>> for pb::ankidroid::DbResult { - fn from(item: &Vec>) -> Self { - ProtoDbResult { - rows: item.iter().map(Row::from).collect(), - } +fn rows_to_proto(rows: &[Vec]) -> anki_proto::ankidroid::DbResult { + anki_proto::ankidroid::DbResult { + rows: rows.iter().map(|r| row_to_proto(r)).collect(), } } @@ -182,7 +179,7 @@ pub(crate) fn db_command_proto(col: &mut Collection, input: &[u8]) -> Result ProtoDbResult { rows: Vec::new() }, - DbResult::Rows(rows) => ProtoDbResult::from(&rows), + DbResult::Rows(rows) => rows_to_proto(&rows), }; let trimmed = super::ankidroid::db::trim_and_cache_remaining( col, diff --git a/rslib/src/backend/deckconfig.rs b/rslib/src/backend/deckconfig.rs index 1429d969c..4d11b9f93 100644 --- a/rslib/src/backend/deckconfig.rs +++ b/rslib/src/backend/deckconfig.rs @@ -1,31 +1,34 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(super) use anki_proto::deckconfig::deckconfig_service::Service as DeckConfigService; +use anki_proto::generic; + use super::Backend; use crate::deckconfig::DeckConfSchema11; use crate::deckconfig::DeckConfig; use crate::deckconfig::UpdateDeckConfigsRequest; -use crate::pb; -pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService; use crate::prelude::*; impl DeckConfigService for Backend 
{ + type Error = AnkiError; + fn add_or_update_deck_config_legacy( &self, - input: pb::generic::Json, - ) -> Result { + input: generic::Json, + ) -> Result { let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?; let mut conf: DeckConfig = conf.into(); self.with_col(|col| { col.transact_no_undo(|col| { col.add_or_update_deck_config_legacy(&mut conf)?; - Ok(pb::deckconfig::DeckConfigId { dcid: conf.id.0 }) + Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 }) }) }) .map(Into::into) } - fn all_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result { + fn all_deck_config_legacy(&self, _input: generic::Empty) -> Result { self.with_col(|col| { let conf: Vec = col .storage @@ -40,15 +43,15 @@ impl DeckConfigService for Backend { fn get_deck_config( &self, - input: pb::deckconfig::DeckConfigId, - ) -> Result { + input: anki_proto::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into())) } fn get_deck_config_legacy( &self, - input: pb::deckconfig::DeckConfigId, - ) -> Result { + input: anki_proto::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| { let conf = col.get_deck_config(input.into(), true)?.unwrap(); let conf: DeckConfSchema11 = conf.into(); @@ -57,7 +60,7 @@ impl DeckConfigService for Backend { .map(Into::into) } - fn new_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result { + fn new_deck_config_legacy(&self, _input: generic::Empty) -> Result { serde_json::to_vec(&DeckConfSchema11::default()) .map_err(Into::into) .map(Into::into) @@ -65,31 +68,31 @@ impl DeckConfigService for Backend { fn remove_deck_config( &self, - input: pb::deckconfig::DeckConfigId, - ) -> Result { + input: anki_proto::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into()))) .map(Into::into) } fn get_deck_configs_for_update( &self, - input: pb::decks::DeckId, - ) -> Result { - self.with_col(|col| col.get_deck_configs_for_update(input.into())) + input: anki_proto::decks::DeckId, + ) -> Result { + self.with_col(|col| col.get_deck_configs_for_update(input.did.into())) } fn update_deck_configs( &self, - input: pb::deckconfig::UpdateDeckConfigsRequest, - ) -> Result { + input: anki_proto::deckconfig::UpdateDeckConfigsRequest, + ) -> Result { self.with_col(|col| col.update_deck_configs(input.into())) .map(Into::into) } } -impl From for pb::deckconfig::DeckConfig { +impl From for anki_proto::deckconfig::DeckConfig { fn from(c: DeckConfig) -> Self { - pb::deckconfig::DeckConfig { + anki_proto::deckconfig::DeckConfig { id: c.id.0, name: c.name, mtime_secs: c.mtime_secs.0, @@ -99,8 +102,8 @@ impl From for pb::deckconfig::DeckConfig { } } -impl From for UpdateDeckConfigsRequest { - fn from(c: pb::deckconfig::UpdateDeckConfigsRequest) -> Self { +impl From for UpdateDeckConfigsRequest { + fn from(c: anki_proto::deckconfig::UpdateDeckConfigsRequest) -> Self { UpdateDeckConfigsRequest { target_deck_id: c.target_deck_id.into(), configs: c.configs.into_iter().map(Into::into).collect(), @@ -113,8 +116,8 @@ impl From for UpdateDeckConfigsRequest } } -impl From for DeckConfig { - fn from(c: pb::deckconfig::DeckConfig) -> Self { +impl From for DeckConfig { + fn from(c: anki_proto::deckconfig::DeckConfig) -> Self { DeckConfig { id: c.id.into(), name: c.name, @@ -124,3 +127,9 @@ impl From for DeckConfig { } } } + +impl From for DeckConfigId { + fn from(dcid: anki_proto::deckconfig::DeckConfigId) -> Self { + DeckConfigId(dcid.dcid) + } +} diff 
--git a/rslib/src/backend/decks.rs b/rslib/src/backend/decks.rs index bfa64dfa9..08b75c9d1 100644 --- a/rslib/src/backend/decks.rs +++ b/rslib/src/backend/decks.rs @@ -3,25 +3,34 @@ use std::convert::TryFrom; +pub(super) use anki_proto::decks::decks_service::Service as DecksService; +use anki_proto::generic; + use super::Backend; +use crate::decks::filtered::search_order_labels; use crate::decks::DeckSchema11; -use crate::decks::FilteredSearchOrder; -use crate::pb; -pub(super) use crate::pb::decks::decks_service::Service as DecksService; use crate::prelude::*; use crate::scheduler::filtered::FilteredDeckForUpdate; impl DecksService for Backend { - fn new_deck(&self, _input: pb::generic::Empty) -> Result { + type Error = AnkiError; + + fn new_deck(&self, _input: generic::Empty) -> Result { Ok(Deck::new_normal().into()) } - fn add_deck(&self, deck: pb::decks::Deck) -> Result { + fn add_deck( + &self, + deck: anki_proto::decks::Deck, + ) -> Result { let mut deck: Deck = deck.try_into()?; self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into())) } - fn add_deck_legacy(&self, input: pb::generic::Json) -> Result { + fn add_deck_legacy( + &self, + input: generic::Json, + ) -> Result { let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?; let mut deck: Deck = schema11.into(); self.with_col(|col| { @@ -32,8 +41,8 @@ impl DecksService for Backend { fn add_or_update_deck_legacy( &self, - input: pb::decks::AddOrUpdateDeckLegacyRequest, - ) -> Result { + input: anki_proto::decks::AddOrUpdateDeckLegacyRequest, + ) -> Result { self.with_col(|col| { let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?; let mut deck: Deck = schema11.into(); @@ -45,11 +54,14 @@ impl DecksService for Backend { } else { col.add_or_update_deck(&mut deck)?; } - Ok(pb::decks::DeckId { did: deck.id.0 }) + Ok(anki_proto::decks::DeckId { did: deck.id.0 }) }) } - fn deck_tree(&self, input: pb::decks::DeckTreeRequest) -> Result { + fn deck_tree( + &self, + input: anki_proto::decks::DeckTreeRequest, + ) -> Result { self.with_col(|col| { let now = if input.now == 0 { None @@ -60,7 +72,7 @@ impl DecksService for Backend { }) } - fn deck_tree_legacy(&self, _input: pb::generic::Empty) -> Result { + fn deck_tree_legacy(&self, _input: generic::Empty) -> Result { self.with_col(|col| { let tree = col.legacy_deck_tree()?; serde_json::to_vec(&tree) @@ -69,7 +81,7 @@ impl DecksService for Backend { }) } - fn get_all_decks_legacy(&self, _input: pb::generic::Empty) -> Result { + fn get_all_decks_legacy(&self, _input: generic::Empty) -> Result { self.with_col(|col| { let decks = col.storage.get_all_decks_as_schema11()?; serde_json::to_vec(&decks).map_err(Into::into) @@ -77,28 +89,34 @@ impl DecksService for Backend { .map(Into::into) } - fn get_deck_id_by_name(&self, input: pb::generic::String) -> Result { + fn get_deck_id_by_name(&self, input: generic::String) -> Result { self.with_col(|col| { col.get_deck_id(&input.val).and_then(|d| { d.or_not_found(input.val) - .map(|d| pb::decks::DeckId { did: d.0 }) + .map(|d| anki_proto::decks::DeckId { did: d.0 }) }) }) } - fn get_deck(&self, input: pb::decks::DeckId) -> Result { + fn get_deck(&self, input: anki_proto::decks::DeckId) -> Result { let did = input.into(); self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into())) } - fn update_deck(&self, input: pb::decks::Deck) -> Result { + fn update_deck( + &self, + input: anki_proto::decks::Deck, + ) -> Result { self.with_col(|col| { let mut deck = Deck::try_from(input)?; col.update_deck(&mut 
deck).map(Into::into) }) } - fn update_deck_legacy(&self, input: pb::generic::Json) -> Result { + fn update_deck_legacy( + &self, + input: generic::Json, + ) -> Result { self.with_col(|col| { let deck: DeckSchema11 = serde_json::from_slice(&input.json)?; let mut deck = deck.into(); @@ -106,7 +124,7 @@ impl DecksService for Backend { }) } - fn get_deck_legacy(&self, input: pb::decks::DeckId) -> Result { + fn get_deck_legacy(&self, input: anki_proto::decks::DeckId) -> Result { let did = input.into(); self.with_col(|col| { let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into(); @@ -118,26 +136,29 @@ impl DecksService for Backend { fn get_deck_names( &self, - input: pb::decks::GetDeckNamesRequest, - ) -> Result { + input: anki_proto::decks::GetDeckNamesRequest, + ) -> Result { self.with_col(|col| { let names = if input.include_filtered { col.get_all_deck_names(input.skip_empty_default)? } else { col.get_all_normal_deck_names()? }; - Ok(names.into()) + Ok(deck_names_to_proto(names)) }) } - fn get_deck_and_child_names(&self, input: pb::decks::DeckId) -> Result { + fn get_deck_and_child_names( + &self, + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| { col.get_deck_and_child_names(input.did.into()) - .map(Into::into) + .map(deck_names_to_proto) }) } - fn new_deck_legacy(&self, input: pb::generic::Bool) -> Result { + fn new_deck_legacy(&self, input: generic::Bool) -> Result { let deck = if input.val { Deck::new_filtered() } else { @@ -151,16 +172,20 @@ impl DecksService for Backend { fn remove_decks( &self, - input: pb::decks::DeckIds, - ) -> Result { - self.with_col(|col| col.remove_decks_and_child_decks(&Into::>::into(input))) - .map(Into::into) + input: anki_proto::decks::DeckIds, + ) -> Result { + self.with_col(|col| { + col.remove_decks_and_child_decks( + &input.dids.into_iter().map(DeckId).collect::>(), + ) + }) + .map(Into::into) } fn reparent_decks( &self, - input: pb::decks::ReparentDecksRequest, - ) -> Result { + input: anki_proto::decks::ReparentDecksRequest, + ) -> Result { let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect(); let new_parent = if input.new_parent == 0 { None @@ -173,78 +198,72 @@ impl DecksService for Backend { fn rename_deck( &self, - input: pb::decks::RenameDeckRequest, - ) -> Result { + input: anki_proto::decks::RenameDeckRequest, + ) -> Result { self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name)) .map(Into::into) } fn get_or_create_filtered_deck( &self, - input: pb::decks::DeckId, - ) -> Result { + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| col.get_or_create_filtered_deck(input.into())) .map(Into::into) } fn add_or_update_filtered_deck( &self, - input: pb::decks::FilteredDeckForUpdate, - ) -> Result { + input: anki_proto::decks::FilteredDeckForUpdate, + ) -> Result { self.with_col(|col| col.add_or_update_filtered_deck(input.into())) .map(|out| out.map(i64::from)) .map(Into::into) } - fn filtered_deck_order_labels( - &self, - _input: pb::generic::Empty, - ) -> Result { - Ok(FilteredSearchOrder::labels(&self.tr).into()) + fn filtered_deck_order_labels(&self, _input: generic::Empty) -> Result { + Ok(search_order_labels(&self.tr).into()) } fn set_deck_collapsed( &self, - input: pb::decks::SetDeckCollapsedRequest, - ) -> Result { + input: anki_proto::decks::SetDeckCollapsedRequest, + ) -> Result { self.with_col(|col| { col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope()) }) .map(Into::into) } - fn set_current_deck(&self, input: 
pb::decks::DeckId) -> Result { + fn set_current_deck( + &self, + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| col.set_current_deck(input.did.into())) .map(Into::into) } - fn get_current_deck(&self, _input: pb::generic::Empty) -> Result { + fn get_current_deck(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.get_current_deck()) .map(|deck| (*deck).clone().into()) } } -impl From for DeckId { - fn from(did: pb::decks::DeckId) -> Self { +impl From for DeckId { + fn from(did: anki_proto::decks::DeckId) -> Self { DeckId(did.did) } } -impl From for Vec { - fn from(dids: pb::decks::DeckIds) -> Self { - dids.dids.into_iter().map(DeckId).collect() - } -} - -impl From for pb::decks::DeckId { +impl From for anki_proto::decks::DeckId { fn from(did: DeckId) -> Self { - pb::decks::DeckId { did: did.0 } + anki_proto::decks::DeckId { did: did.0 } } } -impl From for pb::decks::FilteredDeckForUpdate { +impl From for anki_proto::decks::FilteredDeckForUpdate { fn from(deck: FilteredDeckForUpdate) -> Self { - pb::decks::FilteredDeckForUpdate { + anki_proto::decks::FilteredDeckForUpdate { id: deck.id.into(), name: deck.human_name, config: Some(deck.config), @@ -252,8 +271,8 @@ impl From for pb::decks::FilteredDeckForUpdate { } } -impl From for FilteredDeckForUpdate { - fn from(deck: pb::decks::FilteredDeckForUpdate) -> Self { +impl From for FilteredDeckForUpdate { + fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self { FilteredDeckForUpdate { id: deck.id.into(), human_name: deck.name, @@ -262,74 +281,54 @@ impl From for FilteredDeckForUpdate { } } -impl From for pb::decks::Deck { +impl From for anki_proto::decks::Deck { fn from(d: Deck) -> Self { - pb::decks::Deck { + anki_proto::decks::Deck { id: d.id.0, name: d.name.human_name(), mtime_secs: d.mtime_secs.0, usn: d.usn.0, common: Some(d.common), - kind: Some(d.kind.into()), + kind: Some(kind_from_inline(d.kind)), } } } -impl TryFrom for Deck { +impl TryFrom for Deck { type Error = AnkiError; - fn try_from(d: pb::decks::Deck) -> Result { + fn try_from(d: anki_proto::decks::Deck) -> Result { Ok(Deck { id: DeckId(d.id), name: NativeDeckName::from_human_name(&d.name), mtime_secs: TimestampSecs(d.mtime_secs), usn: Usn(d.usn), common: d.common.unwrap_or_default(), - kind: d.kind.or_invalid("missing kind")?.into(), + kind: kind_to_inline(d.kind.or_invalid("missing kind")?), }) } } -impl From for pb::decks::deck::Kind { - fn from(k: DeckKind) -> Self { - match k { - DeckKind::Normal(n) => pb::decks::deck::Kind::Normal(n), - DeckKind::Filtered(f) => pb::decks::deck::Kind::Filtered(f), - } +fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind { + match kind { + anki_proto::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal), + anki_proto::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered), } } -impl From for DeckKind { - fn from(kind: pb::decks::deck::Kind) -> Self { - match kind { - pb::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal), - pb::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered), - } +fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind { + match k { + DeckKind::Normal(n) => anki_proto::decks::deck::Kind::Normal(n), + DeckKind::Filtered(f) => anki_proto::decks::deck::Kind::Filtered(f), } } -impl From<(DeckId, String)> for pb::decks::DeckNameId { - fn from(id_name: (DeckId, String)) -> Self { - pb::decks::DeckNameId { - id: id_name.0 .0, - name: id_name.1, - } - } +fn deck_name_to_proto((id, name): (DeckId, String)) 
-> anki_proto::decks::DeckNameId { + anki_proto::decks::DeckNameId { id: id.0, name } } -impl From> for pb::decks::DeckNames { - fn from(id_names: Vec<(DeckId, String)>) -> Self { - pb::decks::DeckNames { - entries: id_names.into_iter().map(Into::into).collect(), - } +fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames { + anki_proto::decks::DeckNames { + entries: names.into_iter().map(deck_name_to_proto).collect(), } } - -// fn new_deck(&self, input: pb::generic::Bool) -> Result { -// let deck = if input.val { -// Deck::new_filtered() -// } else { -// Deck::new_normal() -// }; -// Ok(deck.into()) -// } diff --git a/rslib/src/backend/error.rs b/rslib/src/backend/error.rs index 800019bfd..d4b378885 100644 --- a/rslib/src/backend/error.rs +++ b/rslib/src/backend/error.rs @@ -1,14 +1,14 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::backend::backend_error::Kind; + use crate::error::AnkiError; use crate::error::SyncErrorKind; -use crate::pb; -use crate::pb::backend::backend_error::Kind; use crate::prelude::*; impl AnkiError { - pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError { + pub fn into_protobuf(self, tr: &I18n) -> anki_proto::backend::BackendError { let message = self.message(tr); let help_page = self.help_page().map(|page| page as i32); let context = self.context(); @@ -44,7 +44,7 @@ impl AnkiError { AnkiError::WindowsError { .. } => Kind::OsError, }; - pb::backend::BackendError { + anki_proto::backend::BackendError { kind: kind as i32, message, help_page, diff --git a/rslib/src/backend/generic.rs b/rslib/src/backend/generic.rs deleted file mode 100644 index 43ed4215d..000000000 --- a/rslib/src/backend/generic.rs +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use crate::pb; -use crate::prelude::*; - -impl From> for pb::generic::Json { - fn from(json: Vec) -> Self { - pb::generic::Json { json } - } -} - -impl From for pb::generic::String { - fn from(val: String) -> Self { - pb::generic::String { val } - } -} - -impl From for pb::generic::Bool { - fn from(val: bool) -> Self { - pb::generic::Bool { val } - } -} - -impl From for pb::generic::Int32 { - fn from(val: i32) -> Self { - pb::generic::Int32 { val } - } -} - -impl From for pb::generic::Int64 { - fn from(val: i64) -> Self { - pb::generic::Int64 { val } - } -} - -impl From for pb::generic::UInt32 { - fn from(val: u32) -> Self { - pb::generic::UInt32 { val } - } -} - -impl From for pb::generic::UInt32 { - fn from(val: usize) -> Self { - pb::generic::UInt32 { val: val as u32 } - } -} - -impl From<()> for pb::generic::Empty { - fn from(_val: ()) -> Self { - pb::generic::Empty {} - } -} - -impl From for CardId { - fn from(cid: pb::cards::CardId) -> Self { - CardId(cid.cid) - } -} - -impl From for Vec { - fn from(c: pb::cards::CardIds) -> Self { - c.cids.into_iter().map(CardId).collect() - } -} - -impl From for NoteId { - fn from(nid: pb::notes::NoteId) -> Self { - NoteId(nid.nid) - } -} - -impl From for pb::notes::NoteId { - fn from(nid: NoteId) -> Self { - pb::notes::NoteId { nid: nid.0 } - } -} - -impl From for NotetypeId { - fn from(ntid: pb::notetypes::NotetypeId) -> Self { - NotetypeId(ntid.ntid) - } -} - -impl From for pb::notetypes::NotetypeId { - fn from(ntid: NotetypeId) -> Self { - pb::notetypes::NotetypeId { ntid: ntid.0 } - } -} - -impl From for DeckConfigId { 
- fn from(dcid: pb::deckconfig::DeckConfigId) -> Self { - DeckConfigId(dcid.dcid) - } -} - -impl From> for pb::generic::StringList { - fn from(vals: Vec) -> Self { - pb::generic::StringList { vals } - } -} diff --git a/rslib/src/backend/i18n.rs b/rslib/src/backend/i18n.rs index 9996b0314..f094c4642 100644 --- a/rslib/src/backend/i18n.rs +++ b/rslib/src/backend/i18n.rs @@ -3,21 +3,23 @@ use std::collections::HashMap; +use anki_proto::generic; +pub(super) use anki_proto::i18n::i18n_service::Service as I18nService; use fluent::FluentArgs; use fluent::FluentValue; use super::Backend; -use crate::pb; -pub(super) use crate::pb::i18n::i18n_service::Service as I18nService; use crate::prelude::*; use crate::scheduler::timespan::answer_button_time; use crate::scheduler::timespan::time_span; impl I18nService for Backend { + type Error = AnkiError; + fn translate_string( &self, - input: pb::i18n::TranslateStringRequest, - ) -> Result { + input: anki_proto::i18n::TranslateStringRequest, + ) -> Result { let args = build_fluent_args(input.args); Ok(self @@ -32,9 +34,9 @@ impl I18nService for Backend { fn format_timespan( &self, - input: pb::i18n::FormatTimespanRequest, - ) -> Result { - use pb::i18n::format_timespan_request::Context; + input: anki_proto::i18n::FormatTimespanRequest, + ) -> Result { + use anki_proto::i18n::format_timespan_request::Context; Ok(match input.context() { Context::Precise => time_span(input.seconds, &self.tr, true), Context::Intervals => time_span(input.seconds, &self.tr, false), @@ -43,14 +45,19 @@ impl I18nService for Backend { .into()) } - fn i18n_resources(&self, input: pb::i18n::I18nResourcesRequest) -> Result { + fn i18n_resources( + &self, + input: anki_proto::i18n::I18nResourcesRequest, + ) -> Result { serde_json::to_vec(&self.tr.resources_for_js(&input.modules)) .map(Into::into) .map_err(Into::into) } } -fn build_fluent_args(input: HashMap) -> FluentArgs<'static> { +fn build_fluent_args( + input: HashMap, +) -> FluentArgs<'static> { let mut args = FluentArgs::new(); for (key, val) in input { args.set(key, translate_arg_to_fluent_val(&val)); @@ -58,8 +65,8 @@ fn build_fluent_args(input: HashMap) -> Flu args } -fn translate_arg_to_fluent_val(arg: &pb::i18n::TranslateArgValue) -> FluentValue<'static> { - use pb::i18n::translate_arg_value::Value as V; +fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> { + use anki_proto::i18n::translate_arg_value::Value as V; match &arg.value { Some(val) => match val { V::Str(s) => FluentValue::String(s.to_owned().into()), diff --git a/rslib/src/backend/image_occlusion.rs b/rslib/src/backend/image_occlusion.rs index 0ec876475..152fdff1d 100644 --- a/rslib/src/backend/image_occlusion.rs +++ b/rslib/src/backend/image_occlusion.rs @@ -1,23 +1,26 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +pub(super) use anki_proto::image_occlusion::imageocclusion_service::Service as ImageOcclusionService; + use super::Backend; -use crate::pb; -pub(super) use crate::pb::image_occlusion::imageocclusion_service::Service as ImageOcclusionService; use crate::prelude::*; impl ImageOcclusionService for Backend { + type Error = AnkiError; + fn get_image_for_occlusion( &self, - input: pb::image_occlusion::GetImageForOcclusionRequest, - ) -> Result { + input: anki_proto::image_occlusion::GetImageForOcclusionRequest, + ) -> Result { self.with_col(|col| col.get_image_for_occlusion(&input.path)) } fn 
add_image_occlusion_note( &self, - input: pb::image_occlusion::AddImageOcclusionNoteRequest, - ) -> Result { + input: anki_proto::image_occlusion::AddImageOcclusionNoteRequest, + ) -> Result { self.with_col(|col| { col.add_image_occlusion_note( input.notetype_id.into(), @@ -33,15 +36,15 @@ impl ImageOcclusionService for Backend { fn get_image_occlusion_note( &self, - input: pb::image_occlusion::GetImageOcclusionNoteRequest, - ) -> Result { + input: anki_proto::image_occlusion::GetImageOcclusionNoteRequest, + ) -> Result { self.with_col(|col| col.get_image_occlusion_note(input.note_id.into())) } fn update_image_occlusion_note( &self, - input: pb::image_occlusion::UpdateImageOcclusionNoteRequest, - ) -> Result { + input: anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest, + ) -> Result { self.with_col(|col| { col.update_image_occlusion_note( input.note_id.into(), @@ -56,8 +59,8 @@ impl ImageOcclusionService for Backend { fn add_image_occlusion_notetype( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| col.add_image_occlusion_notetype()) .map(Into::into) } diff --git a/rslib/src/backend/import_export.rs b/rslib/src/backend/import_export.rs index 5ec32f0b4..6b4b0dd47 100644 --- a/rslib/src/backend/import_export.rs +++ b/rslib/src/backend/import_export.rs @@ -3,24 +3,27 @@ use std::path::Path; +use anki_proto::generic; +use anki_proto::import_export::export_limit; +pub(super) use anki_proto::import_export::importexport_service::Service as ImportExportService; +use anki_proto::import_export::ExportLimit; + use super::progress::Progress; use super::Backend; use crate::import_export::package::import_colpkg; use crate::import_export::ExportProgress; use crate::import_export::ImportProgress; use crate::import_export::NoteLog; -use crate::pb; -use crate::pb::import_export::export_limit; -pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService; -use crate::pb::import_export::ExportLimit; use crate::prelude::*; use crate::search::SearchNode; impl ImportExportService for Backend { + type Error = AnkiError; + fn export_collection_package( &self, - input: pb::import_export::ExportCollectionPackageRequest, - ) -> Result { + input: anki_proto::import_export::ExportCollectionPackageRequest, + ) -> Result { self.abort_media_sync_and_wait(); let mut guard = self.lock_open_collection()?; @@ -38,8 +41,8 @@ impl ImportExportService for Backend { fn import_collection_package( &self, - input: pb::import_export::ImportCollectionPackageRequest, - ) -> Result { + input: anki_proto::import_export::ImportCollectionPackageRequest, + ) -> Result { let _guard = self.lock_closed_collection()?; import_colpkg( @@ -54,16 +57,16 @@ impl ImportExportService for Backend { fn import_anki_package( &self, - input: pb::import_export::ImportAnkiPackageRequest, - ) -> Result { + input: anki_proto::import_export::ImportAnkiPackageRequest, + ) -> Result { self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn())) .map(Into::into) } fn export_anki_package( &self, - input: pb::import_export::ExportAnkiPackageRequest, - ) -> Result { + input: anki_proto::import_export::ExportAnkiPackageRequest, + ) -> Result { self.with_col(|col| { col.export_apkg( &input.out_path, @@ -80,8 +83,8 @@ impl ImportExportService for Backend { fn get_csv_metadata( &self, - input: pb::import_export::CsvMetadataRequest, - ) -> Result { + input: anki_proto::import_export::CsvMetadataRequest, + ) -> Result { let delimiter = 
input.delimiter.is_some().then(|| input.delimiter()); self.with_col(|col| { col.get_csv_metadata( @@ -96,8 +99,8 @@ impl ImportExportService for Backend { fn import_csv( &self, - input: pb::import_export::ImportCsvRequest, - ) -> Result { + input: anki_proto::import_export::ImportCsvRequest, + ) -> Result { self.with_col(|col| { col.import_csv( &input.path, @@ -110,16 +113,16 @@ impl ImportExportService for Backend { fn export_note_csv( &self, - input: pb::import_export::ExportNoteCsvRequest, - ) -> Result { + input: anki_proto::import_export::ExportNoteCsvRequest, + ) -> Result { self.with_col(|col| col.export_note_csv(input, self.export_progress_fn())) .map(Into::into) } fn export_card_csv( &self, - input: pb::import_export::ExportCardCsvRequest, - ) -> Result { + input: anki_proto::import_export::ExportCardCsvRequest, + ) -> Result { self.with_col(|col| { col.export_card_csv( &input.out_path, @@ -133,16 +136,16 @@ impl ImportExportService for Backend { fn import_json_file( &self, - input: pb::generic::String, - ) -> Result { + input: generic::String, + ) -> Result { self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn())) .map(Into::into) } fn import_json_string( &self, - input: pb::generic::String, - ) -> Result { + input: generic::String, + ) -> Result { self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn())) .map(Into::into) } @@ -160,7 +163,7 @@ impl Backend { } } -impl From> for pb::import_export::ImportResponse { +impl From> for anki_proto::import_export::ImportResponse { fn from(output: OpOutput) -> Self { Self { changes: Some(output.changes.into()), @@ -174,7 +177,7 @@ impl From for SearchNode { use export_limit::Limit; let limit = export_limit .limit - .unwrap_or(Limit::WholeCollection(pb::generic::Empty {})); + .unwrap_or(Limit::WholeCollection(generic::Empty {})); match limit { Limit::WholeCollection(_) => Self::WholeCollection, Limit::DeckId(did) => Self::from_deck_id(did, true), diff --git a/rslib/src/backend/links.rs b/rslib/src/backend/links.rs index 69e649583..d19f9a752 100644 --- a/rslib/src/backend/links.rs +++ b/rslib/src/backend/links.rs @@ -1,17 +1,20 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::links::help_page_link_request::HelpPage; +pub(super) use anki_proto::links::links_service::Service as LinksService; + use super::Backend; -use crate::pb; -use crate::pb::links::help_page_link_request::HelpPage; -pub(super) use crate::pb::links::links_service::Service as LinksService; +use crate::links::help_page_to_link; use crate::prelude::*; impl LinksService for Backend { - fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result { - Ok(HelpPage::from_i32(input.page) - .unwrap_or(HelpPage::Index) - .to_link() - .into()) + type Error = AnkiError; + + fn help_page_link( + &self, + input: anki_proto::links::HelpPageLinkRequest, + ) -> Result { + Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into()) } } diff --git a/rslib/src/backend/media.rs b/rslib/src/backend/media.rs index b52668b95..9a9ab2d8e 100644 --- a/rslib/src/backend/media.rs +++ b/rslib/src/backend/media.rs @@ -1,19 +1,22 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +pub(super) use anki_proto::media::media_service::Service as MediaService; + use super::notes::to_i64s; use super::progress::Progress; 
use super::Backend; use crate::media::check::MediaChecker; -use crate::pb; -pub(super) use crate::pb::media::media_service::Service as MediaService; use crate::prelude::*; impl MediaService for Backend { + type Error = AnkiError; + // media //----------------------------------------------- - fn check_media(&self, _input: pb::generic::Empty) -> Result { + fn check_media(&self, _input: generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); @@ -26,7 +29,7 @@ impl MediaService for Backend { let mut report = checker.summarize_output(&mut output); ctx.report_media_field_referencing_templates(&mut report)?; - Ok(pb::media::CheckMediaResponse { + Ok(anki_proto::media::CheckMediaResponse { unused: output.unused, missing: output.missing, missing_media_notes: to_i64s(output.missing_media_notes), @@ -39,8 +42,8 @@ impl MediaService for Backend { fn trash_media_files( &self, - input: pb::media::TrashMediaFilesRequest, - ) -> Result { + input: anki_proto::media::TrashMediaFilesRequest, + ) -> Result { self.with_col(|col| { let mgr = col.media()?; mgr.remove_files(&input.fnames) @@ -48,7 +51,10 @@ impl MediaService for Backend { .map(Into::into) } - fn add_media_file(&self, input: pb::media::AddMediaFileRequest) -> Result { + fn add_media_file( + &self, + input: anki_proto::media::AddMediaFileRequest, + ) -> Result { self.with_col(|col| { let mgr = col.media()?; Ok(mgr @@ -58,7 +64,7 @@ impl MediaService for Backend { }) } - fn empty_trash(&self, _input: pb::generic::Empty) -> Result { + fn empty_trash(&self, _input: generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); @@ -71,7 +77,7 @@ impl MediaService for Backend { .map(Into::into) } - fn restore_trash(&self, _input: pb::generic::Empty) -> Result { + fn restore_trash(&self, _input: generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); diff --git a/rslib/src/backend/mod.rs b/rslib/src/backend/mod.rs index d715862bd..25765c68c 100644 --- a/rslib/src/backend/mod.rs +++ b/rslib/src/backend/mod.rs @@ -14,7 +14,6 @@ mod dbproxy; mod deckconfig; mod decks; mod error; -mod generic; mod i18n; mod image_occlusion; mod import_export; @@ -35,6 +34,7 @@ use std::sync::Arc; use std::sync::Mutex; use std::thread::JoinHandle; +use anki_proto::ServiceIndex; use once_cell::sync::OnceCell; use progress::AbortHandleSlot; use prost::Message; @@ -63,8 +63,6 @@ use self::sync::SyncService; use self::sync::SyncState; use self::tags::TagsService; use crate::backend::dbproxy::db_command_bytes; -use crate::pb; -use crate::pb::ServiceIndex; use crate::prelude::*; pub struct Backend { @@ -84,10 +82,11 @@ struct BackendState { } pub fn init_backend(init_msg: &[u8]) -> result::Result { - let input: pb::backend::BackendInit = match pb::backend::BackendInit::decode(init_msg) { - Ok(req) => req, - Err(_) => return Err("couldn't decode init request".into()), - }; + let input: anki_proto::backend::BackendInit = + match anki_proto::backend::BackendInit::decode(init_msg) { + Ok(req) => req, + Err(_) => return Err("couldn't decode init request".into()), + }; let tr = I18n::new(&input.preferred_langs); diff --git a/rslib/src/backend/notes.rs b/rslib/src/backend/notes.rs index d9a443132..57f3d9d80 100644 --- a/rslib/src/backend/notes.rs +++ 
b/rslib/src/backend/notes.rs @@ -3,14 +3,19 @@ use std::collections::HashSet; +pub(super) use anki_proto::notes::notes_service::Service as NotesService; + use super::Backend; use crate::cloze::add_cloze_numbers_in_string; -use crate::pb; -pub(super) use crate::pb::notes::notes_service::Service as NotesService; use crate::prelude::*; impl NotesService for Backend { - fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result { + type Error = AnkiError; + + fn new_note( + &self, + input: anki_proto::notetypes::NotetypeId, + ) -> Result { let ntid = input.into(); self.with_col(|col| { let nt = col.get_notetype(ntid)?.or_not_found(ntid)?; @@ -18,11 +23,14 @@ impl NotesService for Backend { }) } - fn add_note(&self, input: pb::notes::AddNoteRequest) -> Result { + fn add_note( + &self, + input: anki_proto::notes::AddNoteRequest, + ) -> Result { self.with_col(|col| { let mut note: Note = input.note.or_invalid("no note provided")?.into(); let changes = col.add_note(&mut note, DeckId(input.deck_id))?; - Ok(pb::notes::AddNoteResponse { + Ok(anki_proto::notes::AddNoteResponse { note_id: note.id.0, changes: Some(changes.into()), }) @@ -31,8 +39,8 @@ impl NotesService for Backend { fn defaults_for_adding( &self, - input: pb::notes::DefaultsForAddingRequest, - ) -> Result { + input: anki_proto::notes::DefaultsForAddingRequest, + ) -> Result { self.with_col(|col| { let home_deck: DeckId = input.home_deck_of_current_review_card.into(); col.defaults_for_adding(home_deck).map(Into::into) @@ -41,8 +49,8 @@ impl NotesService for Backend { fn default_deck_for_notetype( &self, - input: pb::notetypes::NotetypeId, - ) -> Result { + input: anki_proto::notetypes::NotetypeId, + ) -> Result { self.with_col(|col| { Ok(col .default_deck_for_notetype(input.into())? @@ -53,8 +61,8 @@ impl NotesService for Backend { fn update_notes( &self, - input: pb::notes::UpdateNotesRequest, - ) -> Result { + input: anki_proto::notes::UpdateNotesRequest, + ) -> Result { self.with_col(|col| { let notes = input .notes @@ -66,15 +74,15 @@ impl NotesService for Backend { .map(Into::into) } - fn get_note(&self, input: pb::notes::NoteId) -> Result { + fn get_note(&self, input: anki_proto::notes::NoteId) -> Result { let nid = input.into(); self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into)) } fn remove_notes( &self, - input: pb::notes::RemoveNotesRequest, - ) -> Result { + input: anki_proto::notes::RemoveNotesRequest, + ) -> Result { self.with_col(|col| { if !input.note_ids.is_empty() { col.remove_notes( @@ -100,21 +108,21 @@ impl NotesService for Backend { fn cloze_numbers_in_note( &self, - note: pb::notes::Note, - ) -> Result { + note: anki_proto::notes::Note, + ) -> Result { let mut set = HashSet::with_capacity(4); for field in ¬e.fields { add_cloze_numbers_in_string(field, &mut set); } - Ok(pb::notes::ClozeNumbersInNoteResponse { + Ok(anki_proto::notes::ClozeNumbersInNoteResponse { numbers: set.into_iter().map(|n| n as u32).collect(), }) } fn after_note_updates( &self, - input: pb::notes::AfterNoteUpdatesRequest, - ) -> Result { + input: anki_proto::notes::AfterNoteUpdatesRequest, + ) -> Result { self.with_col(|col| { col.after_note_updates( &to_note_ids(input.nids), @@ -127,32 +135,35 @@ impl NotesService for Backend { fn field_names_for_notes( &self, - input: pb::notes::FieldNamesForNotesRequest, - ) -> Result { + input: anki_proto::notes::FieldNamesForNotesRequest, + ) -> Result { self.with_col(|col| { let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect(); col.storage 
.field_names_for_notes(&nids) - .map(|fields| pb::notes::FieldNamesForNotesResponse { fields }) + .map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields }) }) } fn note_fields_check( &self, - input: pb::notes::Note, - ) -> Result { + input: anki_proto::notes::Note, + ) -> Result { let note: Note = input.into(); self.with_col(|col| { col.note_fields_check(¬e) - .map(|r| pb::notes::NoteFieldsCheckResponse { state: r as i32 }) + .map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 }) }) } - fn cards_of_note(&self, input: pb::notes::NoteId) -> Result { + fn cards_of_note( + &self, + input: anki_proto::notes::NoteId, + ) -> Result { self.with_col(|col| { col.storage .all_card_ids_of_note_in_template_order(NoteId(input.nid)) - .map(|v| pb::cards::CardIds { + .map(|v| anki_proto::cards::CardIds { cids: v.into_iter().map(Into::into).collect(), }) }) @@ -160,8 +171,8 @@ impl NotesService for Backend { fn get_single_notetype_of_notes( &self, - input: pb::notes::NoteIds, - ) -> Result { + input: anki_proto::notes::NoteIds, + ) -> Result { self.with_col(|col| { col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId)) .map(Into::into) @@ -176,3 +187,15 @@ pub(super) fn to_note_ids(ids: Vec) -> Vec { pub(super) fn to_i64s(ids: Vec) -> Vec { ids.into_iter().map(Into::into).collect() } + +impl From for NoteId { + fn from(nid: anki_proto::notes::NoteId) -> Self { + NoteId(nid.nid) + } +} + +impl From for anki_proto::notes::NoteId { + fn from(nid: NoteId) -> Self { + anki_proto::notes::NoteId { nid: nid.0 } + } +} diff --git a/rslib/src/backend/notetypes.rs b/rslib/src/backend/notetypes.rs index 854d05611..0898d8253 100644 --- a/rslib/src/backend/notetypes.rs +++ b/rslib/src/backend/notetypes.rs @@ -1,6 +1,9 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +pub(super) use anki_proto::notetypes::notetypes_service::Service as NotetypesService; + use super::Backend; use crate::config::get_aux_notetype_config_key; use crate::notetype::stock::get_stock_notetype; @@ -9,15 +12,15 @@ use crate::notetype::ChangeNotetypeInput; use crate::notetype::Notetype; use crate::notetype::NotetypeChangeInfo; use crate::notetype::NotetypeSchema11; -use crate::pb; -pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService; use crate::prelude::*; impl NotetypesService for Backend { + type Error = AnkiError; + fn add_notetype( &self, - input: pb::notetypes::Notetype, - ) -> Result { + input: anki_proto::notetypes::Notetype, + ) -> Result { let mut notetype: Notetype = input.into(); self.with_col(|col| { Ok(col @@ -27,7 +30,10 @@ impl NotetypesService for Backend { }) } - fn update_notetype(&self, input: pb::notetypes::Notetype) -> Result { + fn update_notetype( + &self, + input: anki_proto::notetypes::Notetype, + ) -> Result { let mut notetype: Notetype = input.into(); self.with_col(|col| col.update_notetype(&mut notetype, false)) .map(Into::into) @@ -35,8 +41,8 @@ impl NotetypesService for Backend { fn add_notetype_legacy( &self, - input: pb::generic::Json, - ) -> Result { + input: generic::Json, + ) -> Result { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); self.with_col(|col| { @@ -49,8 +55,8 @@ impl NotetypesService for Backend { fn update_notetype_legacy( &self, - input: pb::generic::Json, - ) -> Result { + input: generic::Json, + ) -> Result { let legacy: NotetypeSchema11 = 
serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); self.with_col(|col| col.update_notetype(&mut notetype, false)) @@ -59,8 +65,8 @@ impl NotetypesService for Backend { fn add_or_update_notetype( &self, - input: pb::notetypes::AddOrUpdateNotetypeRequest, - ) -> Result { + input: anki_proto::notetypes::AddOrUpdateNotetypeRequest, + ) -> Result { self.with_col(|col| { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut nt: Notetype = legacy.into(); @@ -74,14 +80,14 @@ impl NotetypesService for Backend { } else { col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?; } - Ok(pb::notetypes::NotetypeId { ntid: nt.id.0 }) + Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 }) }) } fn get_stock_notetype_legacy( &self, - input: pb::notetypes::StockNotetype, - ) -> Result { + input: anki_proto::notetypes::StockNotetype, + ) -> Result { let nt = get_stock_notetype(input.kind(), &self.tr); let schema11: NotetypeSchema11 = nt.into(); serde_json::to_vec(&schema11) @@ -89,7 +95,10 @@ impl NotetypesService for Backend { .map(Into::into) } - fn get_notetype(&self, input: pb::notetypes::NotetypeId) -> Result { + fn get_notetype( + &self, + input: anki_proto::notetypes::NotetypeId, + ) -> Result { let ntid = input.into(); self.with_col(|col| { col.storage @@ -99,7 +108,10 @@ impl NotetypesService for Backend { }) } - fn get_notetype_legacy(&self, input: pb::notetypes::NotetypeId) -> Result { + fn get_notetype_legacy( + &self, + input: anki_proto::notetypes::NotetypeId, + ) -> Result { let ntid = input.into(); self.with_col(|col| { let schema11: NotetypeSchema11 = @@ -110,71 +122,71 @@ impl NotetypesService for Backend { fn get_notetype_names( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| { let entries: Vec<_> = col .storage .get_all_notetype_names()? .into_iter() - .map(|(id, name)| pb::notetypes::NotetypeNameId { id: id.0, name }) + .map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name }) .collect(); - Ok(pb::notetypes::NotetypeNames { entries }) + Ok(anki_proto::notetypes::NotetypeNames { entries }) }) } fn get_notetype_names_and_counts( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| { let entries: Vec<_> = col .storage .get_notetype_use_counts()? 
.into_iter() .map( - |(id, name, use_count)| pb::notetypes::NotetypeNameIdUseCount { + |(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount { id: id.0, name, use_count, }, ) .collect(); - Ok(pb::notetypes::NotetypeUseCounts { entries }) + Ok(anki_proto::notetypes::NotetypeUseCounts { entries }) }) } fn get_notetype_id_by_name( &self, - input: pb::generic::String, - ) -> Result { + input: generic::String, + ) -> Result { self.with_col(|col| { col.storage .get_notetype_id(&input.val) .and_then(|nt| nt.or_not_found(input.val)) - .map(|ntid| pb::notetypes::NotetypeId { ntid: ntid.0 }) + .map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 }) }) } fn remove_notetype( &self, - input: pb::notetypes::NotetypeId, - ) -> Result { + input: anki_proto::notetypes::NotetypeId, + ) -> Result { self.with_col(|col| col.remove_notetype(input.into())) .map(Into::into) } fn get_aux_notetype_config_key( &self, - input: pb::notetypes::GetAuxConfigKeyRequest, - ) -> Result { + input: anki_proto::notetypes::GetAuxConfigKeyRequest, + ) -> Result { Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into()) } fn get_aux_template_config_key( &self, - input: pb::notetypes::GetAuxTemplateConfigKeyRequest, - ) -> Result { + input: anki_proto::notetypes::GetAuxTemplateConfigKeyRequest, + ) -> Result { self.with_col(|col| { col.get_aux_template_config_key( input.notetype_id.into(), @@ -187,8 +199,8 @@ impl NotetypesService for Backend { fn get_change_notetype_info( &self, - input: pb::notetypes::GetChangeNotetypeInfoRequest, - ) -> Result { + input: anki_proto::notetypes::GetChangeNotetypeInfoRequest, + ) -> Result { self.with_col(|col| { col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into()) .map(Into::into) @@ -197,20 +209,23 @@ impl NotetypesService for Backend { fn change_notetype( &self, - input: pb::notetypes::ChangeNotetypeRequest, - ) -> Result { + input: anki_proto::notetypes::ChangeNotetypeRequest, + ) -> Result { self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into)) } - fn get_field_names(&self, input: pb::notetypes::NotetypeId) -> Result { + fn get_field_names( + &self, + input: anki_proto::notetypes::NotetypeId, + ) -> Result { self.with_col(|col| col.storage.get_field_names(input.into())) .map(Into::into) } fn restore_notetype_to_stock( &self, - input: pb::notetypes::RestoreNotetypeToStockRequest, - ) -> Result { + input: anki_proto::notetypes::RestoreNotetypeToStockRequest, + ) -> Result { let force_kind = input.force_kind.and_then(StockKind::from_i32); self.with_col(|col| { col.restore_notetype_to_stock( @@ -222,8 +237,8 @@ impl NotetypesService for Backend { } } -impl From for Notetype { - fn from(n: pb::notetypes::Notetype) -> Self { +impl From for Notetype { + fn from(n: anki_proto::notetypes::Notetype) -> Self { Notetype { id: n.id.into(), name: n.name, @@ -236,9 +251,9 @@ impl From for Notetype { } } -impl From for pb::notetypes::ChangeNotetypeInfo { +impl From for anki_proto::notetypes::ChangeNotetypeInfo { fn from(i: NotetypeChangeInfo) -> Self { - pb::notetypes::ChangeNotetypeInfo { + anki_proto::notetypes::ChangeNotetypeInfo { old_notetype_name: i.old_notetype_name, old_field_names: i.old_field_names, old_template_names: i.old_template_names, @@ -249,8 +264,8 @@ impl From for pb::notetypes::ChangeNotetypeInfo { } } -impl From for ChangeNotetypeInput { - fn from(i: pb::notetypes::ChangeNotetypeRequest) -> Self { +impl From for ChangeNotetypeInput { + fn from(i: anki_proto::notetypes::ChangeNotetypeRequest) -> 
Self { ChangeNotetypeInput { current_schema: i.current_schema.into(), note_ids: i.note_ids.into_newtype(NoteId), @@ -278,9 +293,9 @@ impl From for ChangeNotetypeInput { } } -impl From for pb::notetypes::ChangeNotetypeRequest { +impl From for anki_proto::notetypes::ChangeNotetypeRequest { fn from(i: ChangeNotetypeInput) -> Self { - pb::notetypes::ChangeNotetypeRequest { + anki_proto::notetypes::ChangeNotetypeRequest { current_schema: i.current_schema.into(), note_ids: i.note_ids.into_iter().map(Into::into).collect(), old_notetype_name: i.old_notetype_name, @@ -300,3 +315,15 @@ impl From for pb::notetypes::ChangeNotetypeRequest { } } } + +impl From for NotetypeId { + fn from(ntid: anki_proto::notetypes::NotetypeId) -> Self { + NotetypeId(ntid.ntid) + } +} + +impl From for anki_proto::notetypes::NotetypeId { + fn from(ntid: NotetypeId) -> Self { + anki_proto::notetypes::NotetypeId { ntid: ntid.0 } + } +} diff --git a/rslib/src/backend/ops.rs b/rslib/src/backend/ops.rs index fd0f61ef4..510e4fc8f 100644 --- a/rslib/src/backend/ops.rs +++ b/rslib/src/backend/ops.rs @@ -2,14 +2,13 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use crate::ops::OpChanges; -use crate::pb; use crate::prelude::*; use crate::undo::UndoOutput; use crate::undo::UndoStatus; -impl From for pb::collection::OpChanges { +impl From for anki_proto::collection::OpChanges { fn from(c: OpChanges) -> Self { - pb::collection::OpChanges { + anki_proto::collection::OpChanges { card: c.changes.card, note: c.changes.note, deck: c.changes.deck, @@ -27,8 +26,8 @@ impl From for pb::collection::OpChanges { } impl UndoStatus { - pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::UndoStatus { - pb::collection::UndoStatus { + pub(crate) fn into_protobuf(self, tr: &I18n) -> anki_proto::collection::UndoStatus { + anki_proto::collection::UndoStatus { undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(), redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(), last_step: self.last_step as u32, @@ -36,24 +35,24 @@ impl UndoStatus { } } -impl From> for pb::collection::OpChanges { +impl From> for anki_proto::collection::OpChanges { fn from(o: OpOutput<()>) -> Self { o.changes.into() } } -impl From> for pb::collection::OpChangesWithCount { +impl From> for anki_proto::collection::OpChangesWithCount { fn from(out: OpOutput) -> Self { - pb::collection::OpChangesWithCount { + anki_proto::collection::OpChangesWithCount { count: out.output as u32, changes: Some(out.changes.into()), } } } -impl From> for pb::collection::OpChangesWithId { +impl From> for anki_proto::collection::OpChangesWithId { fn from(out: OpOutput) -> Self { - pb::collection::OpChangesWithId { + anki_proto::collection::OpChangesWithId { id: out.output, changes: Some(out.changes.into()), } @@ -61,8 +60,8 @@ impl From> for pb::collection::OpChangesWithId { } impl OpOutput { - pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::OpChangesAfterUndo { - pb::collection::OpChangesAfterUndo { + pub(crate) fn into_protobuf(self, tr: &I18n) -> anki_proto::collection::OpChangesAfterUndo { + anki_proto::collection::OpChangesAfterUndo { changes: Some(self.changes.into()), operation: self.output.undone_op.describe(tr), reverted_to_timestamp: self.output.reverted_to.0, diff --git a/rslib/src/backend/progress.rs b/rslib/src/backend/progress.rs index 6bbbcd401..7d504ad8a 100644 --- a/rslib/src/backend/progress.rs +++ b/rslib/src/backend/progress.rs @@ -4,14 +4,13 @@ use std::sync::Arc; use std::sync::Mutex; +use anki_i18n::I18n; use 
futures::future::AbortHandle; use super::Backend; use crate::dbcheck::DatabaseCheckProgress; -use crate::i18n::I18n; use crate::import_export::ExportProgress; use crate::import_export::ImportProgress; -use crate::pb; use crate::sync::collection::normal::NormalSyncProgress; use crate::sync::collection::progress::FullSyncProgress; use crate::sync::collection::progress::SyncStage; @@ -57,21 +56,24 @@ pub(super) enum Progress { Export(ExportProgress), } -pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::collection::Progress { +pub(super) fn progress_to_proto( + progress: Option, + tr: &I18n, +) -> anki_proto::collection::Progress { let progress = if let Some(progress) = progress { match progress { Progress::MediaSync(p) => { - pb::collection::progress::Value::MediaSync(media_sync_progress(p, tr)) + anki_proto::collection::progress::Value::MediaSync(media_sync_progress(p, tr)) } - Progress::MediaCheck(n) => { - pb::collection::progress::Value::MediaCheck(tr.media_check_checked(n).into()) - } - Progress::FullSync(p) => { - pb::collection::progress::Value::FullSync(pb::collection::progress::FullSync { + Progress::MediaCheck(n) => anki_proto::collection::progress::Value::MediaCheck( + tr.media_check_checked(n).into(), + ), + Progress::FullSync(p) => anki_proto::collection::progress::Value::FullSync( + anki_proto::collection::progress::FullSync { transferred: p.transferred_bytes as u32, total: p.total_bytes as u32, - }) - } + }, + ), Progress::NormalSync(p) => { let stage = match p.stage { SyncStage::Connecting => tr.sync_syncing(), @@ -85,11 +87,13 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::co let removed = tr .sync_media_removed_count(p.local_remove, p.remote_remove) .into(); - pb::collection::progress::Value::NormalSync(pb::collection::progress::NormalSync { - stage, - added, - removed, - }) + anki_proto::collection::progress::Value::NormalSync( + anki_proto::collection::progress::NormalSync { + stage, + added, + removed, + }, + ) } Progress::DatabaseCheck(p) => { let mut stage_total = 0; @@ -106,15 +110,15 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::co DatabaseCheckProgress::History => tr.database_check_checking_history(), } .to_string(); - pb::collection::progress::Value::DatabaseCheck( - pb::collection::progress::DatabaseCheck { + anki_proto::collection::progress::Value::DatabaseCheck( + anki_proto::collection::progress::DatabaseCheck { stage, stage_total, stage_current, }, ) } - Progress::Import(progress) => pb::collection::progress::Value::Importing( + Progress::Import(progress) => anki_proto::collection::progress::Value::Importing( match progress { ImportProgress::File => tr.importing_importing_file(), ImportProgress::Media(n) => tr.importing_processed_media_file(n), @@ -125,7 +129,7 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::co } .into(), ), - Progress::Export(progress) => pb::collection::progress::Value::Exporting( + Progress::Export(progress) => anki_proto::collection::progress::Value::Exporting( match progress { ExportProgress::File => tr.exporting_exporting_file(), ExportProgress::Media(n) => tr.exporting_processed_media_files(n), @@ -137,15 +141,18 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::co ), } } else { - pb::collection::progress::Value::None(pb::generic::Empty {}) + anki_proto::collection::progress::Value::None(anki_proto::generic::Empty {}) }; - pb::collection::Progress { + anki_proto::collection::Progress { value: Some(progress), } } -fn 
media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::collection::progress::MediaSync { - pb::collection::progress::MediaSync { +fn media_sync_progress( + p: MediaSyncProgress, + tr: &I18n, +) -> anki_proto::collection::progress::MediaSync { + anki_proto::collection::progress::MediaSync { checked: tr.sync_media_checked_count(p.checked).into(), added: tr .sync_media_added_count(p.uploaded_files, p.downloaded_files) diff --git a/rslib/src/backend/scheduler/answering.rs b/rslib/src/backend/scheduler/answering.rs index ffbfe3dfb..65e98562f 100644 --- a/rslib/src/backend/scheduler/answering.rs +++ b/rslib/src/backend/scheduler/answering.rs @@ -3,15 +3,14 @@ use std::mem; -use crate::pb; use crate::prelude::*; use crate::scheduler::answering::CardAnswer; use crate::scheduler::answering::Rating; use crate::scheduler::queue::QueuedCard; use crate::scheduler::queue::QueuedCards; -impl From for CardAnswer { - fn from(mut answer: pb::scheduler::CardAnswer) -> Self { +impl From for CardAnswer { + fn from(mut answer: anki_proto::scheduler::CardAnswer) -> Self { let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default(); let custom_data = mem::take(&mut new_state.custom_data); CardAnswer { @@ -26,18 +25,18 @@ impl From for CardAnswer { } } -impl From for Rating { - fn from(rating: pb::scheduler::card_answer::Rating) -> Self { +impl From for Rating { + fn from(rating: anki_proto::scheduler::card_answer::Rating) -> Self { match rating { - pb::scheduler::card_answer::Rating::Again => Rating::Again, - pb::scheduler::card_answer::Rating::Hard => Rating::Hard, - pb::scheduler::card_answer::Rating::Good => Rating::Good, - pb::scheduler::card_answer::Rating::Easy => Rating::Easy, + anki_proto::scheduler::card_answer::Rating::Again => Rating::Again, + anki_proto::scheduler::card_answer::Rating::Hard => Rating::Hard, + anki_proto::scheduler::card_answer::Rating::Good => Rating::Good, + anki_proto::scheduler::card_answer::Rating::Easy => Rating::Easy, } } } -impl From for pb::scheduler::queued_cards::QueuedCard { +impl From for anki_proto::scheduler::queued_cards::QueuedCard { fn from(queued_card: QueuedCard) -> Self { Self { card: Some(queued_card.card.into()), @@ -45,20 +44,20 @@ impl From for pb::scheduler::queued_cards::QueuedCard { context: Some(queued_card.context), queue: match queued_card.kind { crate::scheduler::queue::QueueEntryKind::New => { - pb::scheduler::queued_cards::Queue::New + anki_proto::scheduler::queued_cards::Queue::New } crate::scheduler::queue::QueueEntryKind::Review => { - pb::scheduler::queued_cards::Queue::Review + anki_proto::scheduler::queued_cards::Queue::Review } crate::scheduler::queue::QueueEntryKind::Learning => { - pb::scheduler::queued_cards::Queue::Learning + anki_proto::scheduler::queued_cards::Queue::Learning } } as i32, } } } -impl From for pb::scheduler::QueuedCards { +impl From for anki_proto::scheduler::QueuedCards { fn from(queued_cards: QueuedCards) -> Self { Self { cards: queued_cards.cards.into_iter().map(Into::into).collect(), diff --git a/rslib/src/backend/scheduler/mod.rs b/rslib/src/backend/scheduler/mod.rs index 15b0506d8..58cbb77f5 100644 --- a/rslib/src/backend/scheduler/mod.rs +++ b/rslib/src/backend/scheduler/mod.rs @@ -4,9 +4,11 @@ mod answering; mod states; +use anki_proto::generic; +use anki_proto::scheduler; +pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService; + use super::Backend; -use crate::pb; -pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService; use 
crate::prelude::*; use crate::scheduler::new::NewCardDueOrder; use crate::scheduler::states::CardState; @@ -14,12 +16,14 @@ use crate::scheduler::states::SchedulingStates; use crate::stats::studied_today; impl SchedulerService for Backend { + type Error = AnkiError; + /// This behaves like _updateCutoff() in older code - it also unburies at /// the start of a new day. fn sched_timing_today( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| { let timing = col.timing_today()?; col.unbury_if_day_rolled_over(timing)?; @@ -28,19 +32,19 @@ impl SchedulerService for Backend { } /// Fetch data from DB and return rendered string. - fn studied_today(&self, _input: pb::generic::Empty) -> Result { + fn studied_today(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.studied_today().map(Into::into)) } /// Message rendering only, for old graphs. fn studied_today_message( &self, - input: pb::scheduler::StudiedTodayMessageRequest, - ) -> Result { + input: scheduler::StudiedTodayMessageRequest, + ) -> Result { Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into()) } - fn update_stats(&self, input: pb::scheduler::UpdateStatsRequest) -> Result { + fn update_stats(&self, input: scheduler::UpdateStatsRequest) -> Result { self.with_col(|col| { col.transact_no_undo(|col| { let today = col.current_due_day(0)?; @@ -50,10 +54,7 @@ impl SchedulerService for Backend { }) } - fn extend_limits( - &self, - input: pb::scheduler::ExtendLimitsRequest, - ) -> Result { + fn extend_limits(&self, input: scheduler::ExtendLimitsRequest) -> Result { self.with_col(|col| { col.transact_no_undo(|col| { let today = col.current_due_day(0)?; @@ -72,30 +73,27 @@ impl SchedulerService for Backend { fn counts_for_deck_today( &self, - input: pb::decks::DeckId, - ) -> Result { + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| col.counts_for_deck_today(input.did.into())) } - fn congrats_info( - &self, - _input: pb::generic::Empty, - ) -> Result { + fn congrats_info(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.congrats_info()) } fn restore_buried_and_suspended_cards( &self, - input: pb::cards::CardIds, - ) -> Result { - let cids: Vec<_> = input.into(); + input: anki_proto::cards::CardIds, + ) -> Result { + let cids: Vec<_> = input.cids.into_iter().map(CardId).collect(); self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into)) } fn unbury_deck( &self, - input: pb::scheduler::UnburyDeckRequest, - ) -> Result { + input: scheduler::UnburyDeckRequest, + ) -> Result { self.with_col(|col| { col.unbury_deck(input.deck_id.into(), input.mode()) .map(Into::into) @@ -104,8 +102,8 @@ impl SchedulerService for Backend { fn bury_or_suspend_cards( &self, - input: pb::scheduler::BuryOrSuspendCardsRequest, - ) -> Result { + input: scheduler::BuryOrSuspendCardsRequest, + ) -> Result { self.with_col(|col| { let mode = input.mode(); let cids = if input.card_ids.is_empty() { @@ -118,21 +116,24 @@ impl SchedulerService for Backend { }) } - fn empty_filtered_deck(&self, input: pb::decks::DeckId) -> Result { + fn empty_filtered_deck( + &self, + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into)) } fn rebuild_filtered_deck( &self, - input: pb::decks::DeckId, - ) -> Result { + input: anki_proto::decks::DeckId, + ) -> Result { self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into)) } fn schedule_cards_as_new( &self, 
- input: pb::scheduler::ScheduleCardsAsNewRequest, - ) -> Result { + input: scheduler::ScheduleCardsAsNewRequest, + ) -> Result { self.with_col(|col| { let cids = input.card_ids.into_newtype(CardId); col.reschedule_cards_as_new( @@ -142,7 +143,7 @@ impl SchedulerService for Backend { input.reset_counts, input .context - .and_then(pb::scheduler::schedule_cards_as_new_request::Context::from_i32), + .and_then(scheduler::schedule_cards_as_new_request::Context::from_i32), ) .map(Into::into) }) @@ -150,15 +151,15 @@ impl SchedulerService for Backend { fn schedule_cards_as_new_defaults( &self, - input: pb::scheduler::ScheduleCardsAsNewDefaultsRequest, - ) -> Result { + input: scheduler::ScheduleCardsAsNewDefaultsRequest, + ) -> Result { self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context()))) } fn set_due_date( &self, - input: pb::scheduler::SetDueDateRequest, - ) -> Result { + input: scheduler::SetDueDateRequest, + ) -> Result { let config = input.config_key.map(|v| v.key().into()); let days = input.days; let cids = input.card_ids.into_newtype(CardId); @@ -167,8 +168,8 @@ impl SchedulerService for Backend { fn sort_cards( &self, - input: pb::scheduler::SortCardsRequest, - ) -> Result { + input: scheduler::SortCardsRequest, + ) -> Result { let cids = input.card_ids.into_newtype(CardId); let (start, step, random, shift) = ( input.starting_from, @@ -189,15 +190,15 @@ impl SchedulerService for Backend { fn reposition_defaults( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.reposition_defaults())) } fn sort_deck( &self, - input: pb::scheduler::SortDeckRequest, - ) -> Result { + input: scheduler::SortDeckRequest, + ) -> Result { self.with_col(|col| { col.sort_deck_legacy(input.deck_id.into(), input.randomize) .map(Into::into) @@ -206,8 +207,8 @@ impl SchedulerService for Backend { fn get_scheduling_states( &self, - input: pb::cards::CardId, - ) -> Result { + input: anki_proto::cards::CardId, + ) -> Result { let cid: CardId = input.into(); self.with_col(|col| col.get_scheduling_states(cid)) .map(Into::into) @@ -215,32 +216,35 @@ impl SchedulerService for Backend { fn describe_next_states( &self, - input: pb::scheduler::SchedulingStates, - ) -> Result { + input: scheduler::SchedulingStates, + ) -> Result { let states: SchedulingStates = input.into(); self.with_col(|col| col.describe_next_states(states)) .map(Into::into) } - fn state_is_leech(&self, input: pb::scheduler::SchedulingState) -> Result { + fn state_is_leech(&self, input: scheduler::SchedulingState) -> Result { let state: CardState = input.into(); Ok(state.leeched().into()) } - fn answer_card(&self, input: pb::scheduler::CardAnswer) -> Result { + fn answer_card( + &self, + input: scheduler::CardAnswer, + ) -> Result { self.with_col(|col| col.answer_card(&mut input.into())) .map(Into::into) } - fn upgrade_scheduler(&self, _input: pb::generic::Empty) -> Result { + fn upgrade_scheduler(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler())) .map(Into::into) } fn get_queued_cards( &self, - input: pb::scheduler::GetQueuedCardsRequest, - ) -> Result { + input: scheduler::GetQueuedCardsRequest, + ) -> Result { self.with_col(|col| { col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only) .map(Into::into) @@ -249,26 +253,15 @@ impl SchedulerService for Backend { fn custom_study( &self, - input: pb::scheduler::CustomStudyRequest, - ) -> Result { + input: 
scheduler::CustomStudyRequest, + ) -> Result { self.with_col(|col| col.custom_study(input)).map(Into::into) } fn custom_study_defaults( &self, - input: pb::scheduler::CustomStudyDefaultsRequest, - ) -> Result { + input: scheduler::CustomStudyDefaultsRequest, + ) -> Result { self.with_col(|col| col.custom_study_defaults(input.deck_id.into())) } } - -impl From for pb::scheduler::SchedTimingTodayResponse { - fn from( - t: crate::scheduler::timing::SchedTimingToday, - ) -> pb::scheduler::SchedTimingTodayResponse { - pb::scheduler::SchedTimingTodayResponse { - days_elapsed: t.days_elapsed, - next_day_at: t.next_day_at.0, - } - } -} diff --git a/rslib/src/backend/scheduler/states/filtered.rs b/rslib/src/backend/scheduler/states/filtered.rs index 56733f614..a47baf722 100644 --- a/rslib/src/backend/scheduler/states/filtered.rs +++ b/rslib/src/backend/scheduler/states/filtered.rs @@ -1,33 +1,34 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::FilteredState; -impl From for pb::scheduler::scheduling_state::Filtered { +impl From for anki_proto::scheduler::scheduling_state::Filtered { fn from(state: FilteredState) -> Self { - pb::scheduler::scheduling_state::Filtered { + anki_proto::scheduler::scheduling_state::Filtered { value: Some(match state { FilteredState::Preview(state) => { - pb::scheduler::scheduling_state::filtered::Value::Preview(state.into()) + anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state.into()) } FilteredState::Rescheduling(state) => { - pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state.into()) + anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling( + state.into(), + ) } }), } } } -impl From for FilteredState { - fn from(state: pb::scheduler::scheduling_state::Filtered) -> Self { +impl From for FilteredState { + fn from(state: anki_proto::scheduler::scheduling_state::Filtered) -> Self { match state.value.unwrap_or_else(|| { - pb::scheduler::scheduling_state::filtered::Value::Preview(Default::default()) + anki_proto::scheduler::scheduling_state::filtered::Value::Preview(Default::default()) }) { - pb::scheduler::scheduling_state::filtered::Value::Preview(state) => { + anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state) => { FilteredState::Preview(state.into()) } - pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => { + anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => { FilteredState::Rescheduling(state.into()) } } diff --git a/rslib/src/backend/scheduler/states/learning.rs b/rslib/src/backend/scheduler/states/learning.rs index f165bac12..29f5af143 100644 --- a/rslib/src/backend/scheduler/states/learning.rs +++ b/rslib/src/backend/scheduler/states/learning.rs @@ -1,11 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::LearnState; -impl From for LearnState { - fn from(state: pb::scheduler::scheduling_state::Learning) -> Self { +impl From for LearnState { + fn from(state: anki_proto::scheduler::scheduling_state::Learning) -> Self { LearnState { remaining_steps: state.remaining_steps, scheduled_secs: state.scheduled_secs, @@ -13,9 +12,9 @@ impl From for LearnState { } } -impl From for pb::scheduler::scheduling_state::Learning { +impl From for anki_proto::scheduler::scheduling_state::Learning { fn 
from(state: LearnState) -> Self { - pb::scheduler::scheduling_state::Learning { + anki_proto::scheduler::scheduling_state::Learning { remaining_steps: state.remaining_steps, scheduled_secs: state.scheduled_secs, } diff --git a/rslib/src/backend/scheduler/states/mod.rs b/rslib/src/backend/scheduler/states/mod.rs index ee1722b7a..f02c2fa69 100644 --- a/rslib/src/backend/scheduler/states/mod.rs +++ b/rslib/src/backend/scheduler/states/mod.rs @@ -10,15 +10,14 @@ mod relearning; mod rescheduling; mod review; -use crate::pb; use crate::scheduler::states::CardState; use crate::scheduler::states::NewState; use crate::scheduler::states::NormalState; use crate::scheduler::states::SchedulingStates; -impl From for pb::scheduler::SchedulingStates { +impl From for anki_proto::scheduler::SchedulingStates { fn from(choices: SchedulingStates) -> Self { - pb::scheduler::SchedulingStates { + anki_proto::scheduler::SchedulingStates { current: Some(choices.current.into()), again: Some(choices.again.into()), hard: Some(choices.hard.into()), @@ -28,8 +27,8 @@ impl From for pb::scheduler::SchedulingStates { } } -impl From for SchedulingStates { - fn from(choices: pb::scheduler::SchedulingStates) -> Self { +impl From for SchedulingStates { + fn from(choices: anki_proto::scheduler::SchedulingStates) -> Self { SchedulingStates { current: choices.current.unwrap_or_default().into(), again: choices.again.unwrap_or_default().into(), @@ -40,15 +39,15 @@ impl From for SchedulingStates { } } -impl From for pb::scheduler::SchedulingState { +impl From for anki_proto::scheduler::SchedulingState { fn from(state: CardState) -> Self { - pb::scheduler::SchedulingState { + anki_proto::scheduler::SchedulingState { value: Some(match state { CardState::Normal(state) => { - pb::scheduler::scheduling_state::Value::Normal(state.into()) + anki_proto::scheduler::scheduling_state::Value::Normal(state.into()) } CardState::Filtered(state) => { - pb::scheduler::scheduling_state::Value::Filtered(state.into()) + anki_proto::scheduler::scheduling_state::Value::Filtered(state.into()) } }), custom_data: None, @@ -56,14 +55,14 @@ impl From for pb::scheduler::SchedulingState { } } -impl From for CardState { - fn from(state: pb::scheduler::SchedulingState) -> Self { +impl From for CardState { + fn from(state: anki_proto::scheduler::SchedulingState) -> Self { if let Some(value) = state.value { match value { - pb::scheduler::scheduling_state::Value::Normal(normal) => { + anki_proto::scheduler::scheduling_state::Value::Normal(normal) => { CardState::Normal(normal.into()) } - pb::scheduler::scheduling_state::Value::Filtered(filtered) => { + anki_proto::scheduler::scheduling_state::Value::Filtered(filtered) => { CardState::Filtered(filtered.into()) } } diff --git a/rslib/src/backend/scheduler/states/new.rs b/rslib/src/backend/scheduler/states/new.rs index 9b866951a..9a814371a 100644 --- a/rslib/src/backend/scheduler/states/new.rs +++ b/rslib/src/backend/scheduler/states/new.rs @@ -1,20 +1,19 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::NewState; -impl From for NewState { - fn from(state: pb::scheduler::scheduling_state::New) -> Self { +impl From for NewState { + fn from(state: anki_proto::scheduler::scheduling_state::New) -> Self { NewState { position: state.position, } } } -impl From for pb::scheduler::scheduling_state::New { +impl From for anki_proto::scheduler::scheduling_state::New { fn from(state: NewState) -> Self { - 
pb::scheduler::scheduling_state::New { + anki_proto::scheduler::scheduling_state::New { position: state.position, } } diff --git a/rslib/src/backend/scheduler/states/normal.rs b/rslib/src/backend/scheduler/states/normal.rs index 457e2f342..1c8595da3 100644 --- a/rslib/src/backend/scheduler/states/normal.rs +++ b/rslib/src/backend/scheduler/states/normal.rs @@ -1,45 +1,44 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::NormalState; -impl From for pb::scheduler::scheduling_state::Normal { +impl From for anki_proto::scheduler::scheduling_state::Normal { fn from(state: NormalState) -> Self { - pb::scheduler::scheduling_state::Normal { + anki_proto::scheduler::scheduling_state::Normal { value: Some(match state { NormalState::New(state) => { - pb::scheduler::scheduling_state::normal::Value::New(state.into()) + anki_proto::scheduler::scheduling_state::normal::Value::New(state.into()) } NormalState::Learning(state) => { - pb::scheduler::scheduling_state::normal::Value::Learning(state.into()) + anki_proto::scheduler::scheduling_state::normal::Value::Learning(state.into()) } NormalState::Review(state) => { - pb::scheduler::scheduling_state::normal::Value::Review(state.into()) + anki_proto::scheduler::scheduling_state::normal::Value::Review(state.into()) } NormalState::Relearning(state) => { - pb::scheduler::scheduling_state::normal::Value::Relearning(state.into()) + anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state.into()) } }), } } } -impl From for NormalState { - fn from(state: pb::scheduler::scheduling_state::Normal) -> Self { +impl From for NormalState { + fn from(state: anki_proto::scheduler::scheduling_state::Normal) -> Self { match state.value.unwrap_or_else(|| { - pb::scheduler::scheduling_state::normal::Value::New(Default::default()) + anki_proto::scheduler::scheduling_state::normal::Value::New(Default::default()) }) { - pb::scheduler::scheduling_state::normal::Value::New(state) => { + anki_proto::scheduler::scheduling_state::normal::Value::New(state) => { NormalState::New(state.into()) } - pb::scheduler::scheduling_state::normal::Value::Learning(state) => { + anki_proto::scheduler::scheduling_state::normal::Value::Learning(state) => { NormalState::Learning(state.into()) } - pb::scheduler::scheduling_state::normal::Value::Review(state) => { + anki_proto::scheduler::scheduling_state::normal::Value::Review(state) => { NormalState::Review(state.into()) } - pb::scheduler::scheduling_state::normal::Value::Relearning(state) => { + anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state) => { NormalState::Relearning(state.into()) } } diff --git a/rslib/src/backend/scheduler/states/preview.rs b/rslib/src/backend/scheduler/states/preview.rs index b6d457f2a..0220e36e0 100644 --- a/rslib/src/backend/scheduler/states/preview.rs +++ b/rslib/src/backend/scheduler/states/preview.rs @@ -1,11 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::PreviewState; -impl From for PreviewState { - fn from(state: pb::scheduler::scheduling_state::Preview) -> Self { +impl From for PreviewState { + fn from(state: anki_proto::scheduler::scheduling_state::Preview) -> Self { PreviewState { scheduled_secs: state.scheduled_secs, finished: state.finished, @@ -13,9 +12,9 @@ impl From for PreviewState { } } -impl From for 
pb::scheduler::scheduling_state::Preview { +impl From for anki_proto::scheduler::scheduling_state::Preview { fn from(state: PreviewState) -> Self { - pb::scheduler::scheduling_state::Preview { + anki_proto::scheduler::scheduling_state::Preview { scheduled_secs: state.scheduled_secs, finished: state.finished, } diff --git a/rslib/src/backend/scheduler/states/relearning.rs b/rslib/src/backend/scheduler/states/relearning.rs index 2d2fa9405..f4633f823 100644 --- a/rslib/src/backend/scheduler/states/relearning.rs +++ b/rslib/src/backend/scheduler/states/relearning.rs @@ -1,11 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::RelearnState; -impl From for RelearnState { - fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self { +impl From for RelearnState { + fn from(state: anki_proto::scheduler::scheduling_state::Relearning) -> Self { RelearnState { review: state.review.unwrap_or_default().into(), learning: state.learning.unwrap_or_default().into(), @@ -13,9 +12,9 @@ impl From for RelearnState { } } -impl From for pb::scheduler::scheduling_state::Relearning { +impl From for anki_proto::scheduler::scheduling_state::Relearning { fn from(state: RelearnState) -> Self { - pb::scheduler::scheduling_state::Relearning { + anki_proto::scheduler::scheduling_state::Relearning { review: Some(state.review.into()), learning: Some(state.learning.into()), } diff --git a/rslib/src/backend/scheduler/states/rescheduling.rs b/rslib/src/backend/scheduler/states/rescheduling.rs index d2ad5f416..2708f8ce9 100644 --- a/rslib/src/backend/scheduler/states/rescheduling.rs +++ b/rslib/src/backend/scheduler/states/rescheduling.rs @@ -1,20 +1,19 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::ReschedulingFilterState; -impl From for ReschedulingFilterState { - fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self { +impl From for ReschedulingFilterState { + fn from(state: anki_proto::scheduler::scheduling_state::ReschedulingFilter) -> Self { ReschedulingFilterState { original_state: state.original_state.unwrap_or_default().into(), } } } -impl From for pb::scheduler::scheduling_state::ReschedulingFilter { +impl From for anki_proto::scheduler::scheduling_state::ReschedulingFilter { fn from(state: ReschedulingFilterState) -> Self { - pb::scheduler::scheduling_state::ReschedulingFilter { + anki_proto::scheduler::scheduling_state::ReschedulingFilter { original_state: Some(state.original_state.into()), } } diff --git a/rslib/src/backend/scheduler/states/review.rs b/rslib/src/backend/scheduler/states/review.rs index b175f31f1..17d4ab7e4 100644 --- a/rslib/src/backend/scheduler/states/review.rs +++ b/rslib/src/backend/scheduler/states/review.rs @@ -1,11 +1,10 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::scheduler::states::ReviewState; -impl From for ReviewState { - fn from(state: pb::scheduler::scheduling_state::Review) -> Self { +impl From for ReviewState { + fn from(state: anki_proto::scheduler::scheduling_state::Review) -> Self { ReviewState { scheduled_days: state.scheduled_days, elapsed_days: state.elapsed_days, @@ -16,9 +15,9 @@ impl From for ReviewState { } } -impl From for pb::scheduler::scheduling_state::Review { +impl 
From for anki_proto::scheduler::scheduling_state::Review { fn from(state: ReviewState) -> Self { - pb::scheduler::scheduling_state::Review { + anki_proto::scheduler::scheduling_state::Review { scheduled_days: state.scheduled_days, elapsed_days: state.elapsed_days, ease_factor: state.ease_factor, diff --git a/rslib/src/backend/search/browser_table.rs b/rslib/src/backend/search/browser_table.rs index cfa08f902..82584f262 100644 --- a/rslib/src/backend/search/browser_table.rs +++ b/rslib/src/backend/search/browser_table.rs @@ -3,13 +3,13 @@ use std::str::FromStr; +use anki_i18n::I18n; + use crate::browser_table; -use crate::i18n::I18n; -use crate::pb; impl browser_table::Column { - pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column { - pb::search::browser_columns::Column { + pub fn to_pb_column(self, i18n: &I18n) -> anki_proto::search::browser_columns::Column { + anki_proto::search::browser_columns::Column { key: self.to_string(), cards_mode_label: self.cards_mode_label(i18n), notes_mode_label: self.notes_mode_label(i18n), @@ -22,12 +22,11 @@ impl browser_table::Column { } } -impl From for Vec { - fn from(input: pb::generic::StringList) -> Self { - input - .vals - .iter() - .map(|c| browser_table::Column::from_str(c).unwrap_or_default()) - .collect() - } +pub(crate) fn string_list_to_browser_columns( + list: anki_proto::generic::StringList, +) -> Vec { + list.vals + .into_iter() + .map(|c| browser_table::Column::from_str(&c).unwrap_or_default()) + .collect() } diff --git a/rslib/src/backend/search/mod.rs b/rslib/src/backend/search/mod.rs index 905c2588a..916dae432 100644 --- a/rslib/src/backend/search/mod.rs +++ b/rslib/src/backend/search/mod.rs @@ -7,12 +7,14 @@ mod search_node; use std::str::FromStr; use std::sync::Arc; +use anki_proto::generic; +pub(super) use anki_proto::search::search_service::Service as SearchService; +use anki_proto::search::sort_order::Value as SortOrderProto; + use super::notes::to_note_ids; use super::Backend; +use crate::backend::search::browser_table::string_list_to_browser_columns; use crate::browser_table::Column; -use crate::pb; -pub(super) use crate::pb::search::search_service::Service as SearchService; -use crate::pb::search::sort_order::Value as SortOrderProto; use crate::prelude::*; use crate::search::replace_search_node; use crate::search::JoinSearches; @@ -20,26 +22,37 @@ use crate::search::Node; use crate::search::SortMode; impl SearchService for Backend { - fn build_search_string(&self, input: pb::search::SearchNode) -> Result { + type Error = AnkiError; + + fn build_search_string( + &self, + input: anki_proto::search::SearchNode, + ) -> Result { let node: Node = input.try_into()?; Ok(SearchBuilder::from_root(node).write().into()) } - fn search_cards(&self, input: pb::search::SearchRequest) -> Result { + fn search_cards( + &self, + input: anki_proto::search::SearchRequest, + ) -> Result { self.with_col(|col| { let order = input.order.unwrap_or_default().value.into(); let cids = col.search_cards(&input.search, order)?; - Ok(pb::search::SearchResponse { + Ok(anki_proto::search::SearchResponse { ids: cids.into_iter().map(|v| v.0).collect(), }) }) } - fn search_notes(&self, input: pb::search::SearchRequest) -> Result { + fn search_notes( + &self, + input: anki_proto::search::SearchRequest, + ) -> Result { self.with_col(|col| { let order = input.order.unwrap_or_default().value.into(); let nids = col.search_notes(&input.search, order)?; - Ok(pb::search::SearchResponse { + Ok(anki_proto::search::SearchResponse { ids: 
nids.into_iter().map(|v| v.0).collect(), }) }) @@ -47,15 +60,21 @@ impl SearchService for Backend { fn join_search_nodes( &self, - input: pb::search::JoinSearchNodesRequest, - ) -> Result { + input: anki_proto::search::JoinSearchNodesRequest, + ) -> Result { let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?; let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?; Ok( - match pb::search::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() { - pb::search::search_node::group::Joiner::And => existing_node.and_flat(additional_node), - pb::search::search_node::group::Joiner::Or => existing_node.or_flat(additional_node), + match anki_proto::search::search_node::group::Joiner::from_i32(input.joiner) + .unwrap_or_default() + { + anki_proto::search::search_node::group::Joiner::And => { + existing_node.and_flat(additional_node) + } + anki_proto::search::search_node::group::Joiner::Or => { + existing_node.or_flat(additional_node) + } } .write() .into(), @@ -64,8 +83,8 @@ impl SearchService for Backend { fn replace_search_node( &self, - input: pb::search::ReplaceSearchNodeRequest, - ) -> Result { + input: anki_proto::search::ReplaceSearchNodeRequest, + ) -> Result { let existing = { let node = input.existing_node.unwrap_or_default().try_into()?; if let Node::Group(nodes) = node { @@ -80,8 +99,8 @@ impl SearchService for Backend { fn find_and_replace( &self, - input: pb::search::FindAndReplaceRequest, - ) -> Result { + input: anki_proto::search::FindAndReplaceRequest, + ) -> Result { let mut search = if input.regex { input.search } else { @@ -108,31 +127,29 @@ impl SearchService for Backend { fn all_browser_columns( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.all_browser_columns())) } - fn set_active_browser_columns( - &self, - input: pb::generic::StringList, - ) -> Result { + fn set_active_browser_columns(&self, input: generic::StringList) -> Result { self.with_col(|col| { - col.state.active_browser_columns = Some(Arc::new(input.into())); + col.state.active_browser_columns = + Some(Arc::new(string_list_to_browser_columns(input))); Ok(()) }) .map(Into::into) } - fn browser_row_for_id(&self, input: pb::generic::Int64) -> Result { + fn browser_row_for_id(&self, input: generic::Int64) -> Result { self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into)) } } impl From> for SortMode { fn from(order: Option) -> Self { - use pb::search::sort_order::Value as V; - match order.unwrap_or(V::None(pb::generic::Empty {})) { + use anki_proto::search::sort_order::Value as V; + match order.unwrap_or(V::None(generic::Empty {})) { V::None(_) => SortMode::NoOrder, V::Custom(s) => SortMode::Custom(s), V::Builtin(b) => SortMode::Builtin { diff --git a/rslib/src/backend/search/search_node.rs b/rslib/src/backend/search/search_node.rs index 907f69a98..ce533d191 100644 --- a/rslib/src/backend/search/search_node.rs +++ b/rslib/src/backend/search/search_node.rs @@ -1,9 +1,9 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::search::search_node::IdList; use itertools::Itertools; -use crate::pb; use crate::prelude::*; use crate::search::parse_search; use crate::search::Negated; @@ -16,13 +16,13 @@ use crate::search::TemplateKind; use crate::text::escape_anki_wildcards; use crate::text::escape_anki_wildcards_for_search_node; -impl TryFrom for Node { +impl TryFrom for Node { 
type Error = AnkiError; - fn try_from(msg: pb::search::SearchNode) -> std::result::Result { - use pb::search::search_node::group::Joiner; - use pb::search::search_node::Filter; - use pb::search::search_node::Flag; + fn try_from(msg: anki_proto::search::SearchNode) -> std::result::Result { + use anki_proto::search::search_node::group::Joiner; + use anki_proto::search::search_node::Filter; + use anki_proto::search::search_node::Flag; Ok(if let Some(filter) = msg.filter { match filter { Filter::Tag(s) => SearchNode::from_tag_name(&s).into(), @@ -32,7 +32,7 @@ impl TryFrom for Node { Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16))) } Filter::Nid(nid) => Node::Search(SearchNode::NoteIds(nid.to_string())), - Filter::Nids(nids) => Node::Search(SearchNode::NoteIds(nids.into_id_string())), + Filter::Nids(nids) => Node::Search(SearchNode::NoteIds(id_list_to_string(nids))), Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates { notetype_id: dupe.notetype_id.into(), text: dupe.first_field, @@ -58,7 +58,7 @@ impl TryFrom for Node { }), Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)), Filter::CardState(state) => Node::Search(SearchNode::State( - pb::search::search_node::CardState::from_i32(state) + anki_proto::search::search_node::CardState::from_i32(state) .unwrap_or_default() .into(), )), @@ -120,38 +120,36 @@ impl TryFrom for Node { } } -impl From for RatingKind { - fn from(r: pb::search::search_node::Rating) -> Self { +impl From for RatingKind { + fn from(r: anki_proto::search::search_node::Rating) -> Self { match r { - pb::search::search_node::Rating::Again => RatingKind::AnswerButton(1), - pb::search::search_node::Rating::Hard => RatingKind::AnswerButton(2), - pb::search::search_node::Rating::Good => RatingKind::AnswerButton(3), - pb::search::search_node::Rating::Easy => RatingKind::AnswerButton(4), - pb::search::search_node::Rating::Any => RatingKind::AnyAnswerButton, - pb::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule, + anki_proto::search::search_node::Rating::Again => RatingKind::AnswerButton(1), + anki_proto::search::search_node::Rating::Hard => RatingKind::AnswerButton(2), + anki_proto::search::search_node::Rating::Good => RatingKind::AnswerButton(3), + anki_proto::search::search_node::Rating::Easy => RatingKind::AnswerButton(4), + anki_proto::search::search_node::Rating::Any => RatingKind::AnyAnswerButton, + anki_proto::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule, } } } -impl From for StateKind { - fn from(k: pb::search::search_node::CardState) -> Self { +impl From for StateKind { + fn from(k: anki_proto::search::search_node::CardState) -> Self { match k { - pb::search::search_node::CardState::New => StateKind::New, - pb::search::search_node::CardState::Learn => StateKind::Learning, - pb::search::search_node::CardState::Review => StateKind::Review, - pb::search::search_node::CardState::Due => StateKind::Due, - pb::search::search_node::CardState::Suspended => StateKind::Suspended, - pb::search::search_node::CardState::Buried => StateKind::Buried, + anki_proto::search::search_node::CardState::New => StateKind::New, + anki_proto::search::search_node::CardState::Learn => StateKind::Learning, + anki_proto::search::search_node::CardState::Review => StateKind::Review, + anki_proto::search::search_node::CardState::Due => StateKind::Due, + anki_proto::search::search_node::CardState::Suspended => StateKind::Suspended, + anki_proto::search::search_node::CardState::Buried => StateKind::Buried, } } 
} -impl pb::search::search_node::IdList { - fn into_id_string(self) -> String { - self.ids - .iter() - .map(|i| i.to_string()) - .collect::>() - .join(",") - } +fn id_list_to_string(list: IdList) -> String { + list.ids + .iter() + .map(|i| i.to_string()) + .collect::>() + .join(",") } diff --git a/rslib/src/backend/stats.rs b/rslib/src/backend/stats.rs index 7ac9c8caa..be1de00b7 100644 --- a/rslib/src/backend/stats.rs +++ b/rslib/src/backend/stats.rs @@ -1,32 +1,40 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(super) use anki_proto::stats::stats_service::Service as StatsService; + use super::Backend; -use crate::pb; -pub(super) use crate::pb::stats::stats_service::Service as StatsService; use crate::prelude::*; use crate::revlog::RevlogReviewKind; impl StatsService for Backend { - fn card_stats(&self, input: pb::cards::CardId) -> Result { - self.with_col(|col| col.card_stats(input.into())) + type Error = AnkiError; + + fn card_stats( + &self, + input: anki_proto::cards::CardId, + ) -> Result { + self.with_col(|col| col.card_stats(input.cid.into())) } - fn graphs(&self, input: pb::stats::GraphsRequest) -> Result { + fn graphs( + &self, + input: anki_proto::stats::GraphsRequest, + ) -> Result { self.with_col(|col| col.graph_data_for_search(&input.search, input.days)) } fn get_graph_preferences( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: anki_proto::generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.get_graph_preferences())) } fn set_graph_preferences( &self, - input: pb::stats::GraphPreferences, - ) -> Result { + input: anki_proto::stats::GraphPreferences, + ) -> Result { self.with_col(|col| col.set_graph_preferences(input)) .map(Into::into) } @@ -35,11 +43,11 @@ impl StatsService for Backend { impl From for i32 { fn from(kind: RevlogReviewKind) -> Self { (match kind { - RevlogReviewKind::Learning => pb::stats::revlog_entry::ReviewKind::Learning, - RevlogReviewKind::Review => pb::stats::revlog_entry::ReviewKind::Review, - RevlogReviewKind::Relearning => pb::stats::revlog_entry::ReviewKind::Relearning, - RevlogReviewKind::Filtered => pb::stats::revlog_entry::ReviewKind::Filtered, - RevlogReviewKind::Manual => pb::stats::revlog_entry::ReviewKind::Manual, + RevlogReviewKind::Learning => anki_proto::stats::revlog_entry::ReviewKind::Learning, + RevlogReviewKind::Review => anki_proto::stats::revlog_entry::ReviewKind::Review, + RevlogReviewKind::Relearning => anki_proto::stats::revlog_entry::ReviewKind::Relearning, + RevlogReviewKind::Filtered => anki_proto::stats::revlog_entry::ReviewKind::Filtered, + RevlogReviewKind::Manual => anki_proto::stats::revlog_entry::ReviewKind::Manual, }) as i32 } } diff --git a/rslib/src/backend/sync/mod.rs b/rslib/src/backend/sync/mod.rs index 0f83f3d5b..93ee7f000 100644 --- a/rslib/src/backend/sync/mod.rs +++ b/rslib/src/backend/sync/mod.rs @@ -3,18 +3,18 @@ use std::sync::Arc; +use anki_proto::generic; +pub(super) use anki_proto::sync::sync_service::Service as SyncService; +use anki_proto::sync::sync_status_response::Required; +use anki_proto::sync::SyncStatusResponse; use futures::future::AbortHandle; use futures::future::AbortRegistration; use futures::future::Abortable; -use pb::sync::sync_status_response::Required; use reqwest::Url; use tracing::warn; use super::progress::AbortHandleSlot; use super::Backend; -use crate::pb; -pub(super) use crate::pb::sync::sync_service::Service as SyncService; -use crate::pb::sync::SyncStatusResponse; use 
crate::prelude::*; use crate::sync::collection::normal::ClientSyncState; use crate::sync::collection::normal::NormalSyncProgress; @@ -46,40 +46,42 @@ impl RemoteSyncStatus { } } -impl From for pb::sync::SyncCollectionResponse { +impl From for anki_proto::sync::SyncCollectionResponse { fn from(o: SyncOutput) -> Self { - pb::sync::SyncCollectionResponse { + anki_proto::sync::SyncCollectionResponse { host_number: o.host_number, server_message: o.server_message, new_endpoint: o.new_endpoint, required: match o.required { SyncActionRequired::NoChanges => { - pb::sync::sync_collection_response::ChangesRequired::NoChanges as i32 + anki_proto::sync::sync_collection_response::ChangesRequired::NoChanges as i32 } SyncActionRequired::FullSyncRequired { upload_ok, download_ok, } => { if !upload_ok { - pb::sync::sync_collection_response::ChangesRequired::FullDownload as i32 + anki_proto::sync::sync_collection_response::ChangesRequired::FullDownload + as i32 } else if !download_ok { - pb::sync::sync_collection_response::ChangesRequired::FullUpload as i32 + anki_proto::sync::sync_collection_response::ChangesRequired::FullUpload + as i32 } else { - pb::sync::sync_collection_response::ChangesRequired::FullSync as i32 + anki_proto::sync::sync_collection_response::ChangesRequired::FullSync as i32 } } SyncActionRequired::NormalSyncRequired => { - pb::sync::sync_collection_response::ChangesRequired::NormalSync as i32 + anki_proto::sync::sync_collection_response::ChangesRequired::NormalSync as i32 } }, } } } -impl TryFrom for SyncAuth { +impl TryFrom for SyncAuth { type Error = AnkiError; - fn try_from(value: pb::sync::SyncAuth) -> std::result::Result { + fn try_from(value: anki_proto::sync::SyncAuth) -> std::result::Result { Ok(SyncAuth { hkey: value.hkey, endpoint: value @@ -100,11 +102,13 @@ impl TryFrom for SyncAuth { } impl SyncService for Backend { - fn sync_media(&self, input: pb::sync::SyncAuth) -> Result { + type Error = AnkiError; + + fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result { self.sync_media_inner(input).map(Into::into) } - fn abort_sync(&self, _input: pb::generic::Empty) -> Result { + fn abort_sync(&self, _input: generic::Empty) -> Result { if let Some(handle) = self.sync_abort.lock().unwrap().take() { handle.abort(); } @@ -112,7 +116,7 @@ impl SyncService for Backend { } /// Abort the media sync. Does not wait for completion. 
- fn abort_media_sync(&self, _input: pb::generic::Empty) -> Result { + fn abort_media_sync(&self, _input: generic::Empty) -> Result { let guard = self.state.lock().unwrap(); if let Some(handle) = &guard.sync.media_sync_abort { handle.abort(); @@ -120,27 +124,33 @@ impl SyncService for Backend { Ok(().into()) } - fn sync_login(&self, input: pb::sync::SyncLoginRequest) -> Result { + fn sync_login( + &self, + input: anki_proto::sync::SyncLoginRequest, + ) -> Result { self.sync_login_inner(input) } - fn sync_status(&self, input: pb::sync::SyncAuth) -> Result { + fn sync_status( + &self, + input: anki_proto::sync::SyncAuth, + ) -> Result { self.sync_status_inner(input) } fn sync_collection( &self, - input: pb::sync::SyncAuth, - ) -> Result { + input: anki_proto::sync::SyncAuth, + ) -> Result { self.sync_collection_inner(input) } - fn full_upload(&self, input: pb::sync::SyncAuth) -> Result { + fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result { self.full_sync_inner(input, true)?; Ok(().into()) } - fn full_download(&self, input: pb::sync::SyncAuth) -> Result { + fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result { self.full_sync_inner(input, false)?; Ok(().into()) } @@ -173,7 +183,7 @@ impl Backend { Ok((guard, abort_reg)) } - pub(super) fn sync_media_inner(&self, auth: pb::sync::SyncAuth) -> Result<()> { + pub(super) fn sync_media_inner(&self, auth: anki_proto::sync::SyncAuth) -> Result<()> { let auth = auth.try_into()?; // mark media sync as active let (abort_handle, abort_reg) = AbortHandle::new_pair(); @@ -228,8 +238,8 @@ impl Backend { pub(super) fn sync_login_inner( &self, - input: pb::sync::SyncLoginRequest, - ) -> Result { + input: anki_proto::sync::SyncLoginRequest, + ) -> Result { let (_guard, abort_reg) = self.sync_abort_handle()?; let rt = self.runtime_handle(); @@ -239,7 +249,7 @@ impl Backend { Ok(sync_result) => sync_result, Err(_) => Err(AnkiError::Interrupted), }; - ret.map(|a| pb::sync::SyncAuth { + ret.map(|a| anki_proto::sync::SyncAuth { hkey: a.hkey, endpoint: None, io_timeout_secs: None, @@ -248,19 +258,21 @@ impl Backend { pub(super) fn sync_status_inner( &self, - input: pb::sync::SyncAuth, - ) -> Result { + input: anki_proto::sync::SyncAuth, + ) -> Result { // any local changes mean we can skip the network round-trip let req = self.with_col(|col| col.sync_status_offline())?; if req != Required::NoChanges { - return Ok(req.into()); + return Ok(status_response_from_required(req)); } // return cached server response if only a short time has elapsed { let guard = self.state.lock().unwrap(); if guard.sync.remote_sync_status.last_check.elapsed_secs() < 300 { - return Ok(guard.sync.remote_sync_status.last_response.into()); + return Ok(status_response_from_required( + guard.sync.remote_sync_status.last_response, + )); } } @@ -288,8 +300,8 @@ impl Backend { pub(super) fn sync_collection_inner( &self, - input: pb::sync::SyncAuth, - ) -> Result { + input: anki_proto::sync::SyncAuth, + ) -> Result { let auth: SyncAuth = input.try_into()?; let (_guard, abort_reg) = self.sync_abort_handle()?; @@ -329,7 +341,11 @@ impl Backend { Ok(output.into()) } - pub(super) fn full_sync_inner(&self, input: pb::sync::SyncAuth, upload: bool) -> Result<()> { + pub(super) fn full_sync_inner( + &self, + input: anki_proto::sync::SyncAuth, + upload: bool, + ) -> Result<()> { let auth = input.try_into()?; self.abort_media_sync_and_wait(); @@ -381,12 +397,10 @@ impl Backend { } } -impl From for SyncStatusResponse { - fn from(r: Required) -> Self { - SyncStatusResponse { - 
required: r.into(), - new_endpoint: None, - } +fn status_response_from_required(required: Required) -> SyncStatusResponse { + SyncStatusResponse { + required: required.into(), + new_endpoint: None, } } diff --git a/rslib/src/backend/tags.rs b/rslib/src/backend/tags.rs index d10b89997..d3db5d144 100644 --- a/rslib/src/backend/tags.rs +++ b/rslib/src/backend/tags.rs @@ -1,22 +1,25 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +pub(super) use anki_proto::tags::tags_service::Service as TagsService; + use super::notes::to_note_ids; use super::Backend; -use crate::pb; -pub(super) use crate::pb::tags::tags_service::Service as TagsService; use crate::prelude::*; impl TagsService for Backend { + type Error = AnkiError; + fn clear_unused_tags( &self, - _input: pb::generic::Empty, - ) -> Result { + _input: generic::Empty, + ) -> Result { self.with_col(|col| col.clear_unused_tags().map(Into::into)) } - fn all_tags(&self, _input: pb::generic::Empty) -> Result { - Ok(pb::generic::StringList { + fn all_tags(&self, _input: generic::Empty) -> Result { + Ok(generic::StringList { vals: self.with_col(|col| { Ok(col .storage @@ -28,28 +31,31 @@ impl TagsService for Backend { }) } - fn remove_tags(&self, tags: pb::generic::String) -> Result { + fn remove_tags( + &self, + tags: generic::String, + ) -> Result { self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into)) } fn set_tag_collapsed( &self, - input: pb::tags::SetTagCollapsedRequest, - ) -> Result { + input: anki_proto::tags::SetTagCollapsedRequest, + ) -> Result { self.with_col(|col| { col.set_tag_collapsed(&input.name, input.collapsed) .map(Into::into) }) } - fn tag_tree(&self, _input: pb::generic::Empty) -> Result { + fn tag_tree(&self, _input: generic::Empty) -> Result { self.with_col(|col| col.tag_tree()) } fn reparent_tags( &self, - input: pb::tags::ReparentTagsRequest, - ) -> Result { + input: anki_proto::tags::ReparentTagsRequest, + ) -> Result { let source_tags = input.tags; let target_tag = if input.new_parent.is_empty() { None @@ -62,16 +68,16 @@ impl TagsService for Backend { fn rename_tags( &self, - input: pb::tags::RenameTagsRequest, - ) -> Result { + input: anki_proto::tags::RenameTagsRequest, + ) -> Result { self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix)) .map(Into::into) } fn add_note_tags( &self, - input: pb::tags::NoteIdsAndTagsRequest, - ) -> Result { + input: anki_proto::tags::NoteIdsAndTagsRequest, + ) -> Result { self.with_col(|col| { col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags) .map(Into::into) @@ -80,8 +86,8 @@ impl TagsService for Backend { fn remove_note_tags( &self, - input: pb::tags::NoteIdsAndTagsRequest, - ) -> Result { + input: anki_proto::tags::NoteIdsAndTagsRequest, + ) -> Result { self.with_col(|col| { col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags) .map(Into::into) @@ -90,8 +96,8 @@ impl TagsService for Backend { fn find_and_replace_tag( &self, - input: pb::tags::FindAndReplaceTagRequest, - ) -> Result { + input: anki_proto::tags::FindAndReplaceTagRequest, + ) -> Result { self.with_col(|col| { let note_ids = if input.note_ids.is_empty() { col.search_notes_unordered("")? 
@@ -111,11 +117,11 @@ impl TagsService for Backend { fn complete_tag( &self, - input: pb::tags::CompleteTagRequest, - ) -> Result { + input: anki_proto::tags::CompleteTagRequest, + ) -> Result { self.with_col(|col| { let tags = col.complete_tag(&input.input, input.match_limit as usize)?; - Ok(pb::tags::CompleteTagResponse { tags }) + Ok(anki_proto::tags::CompleteTagResponse { tags }) }) } } diff --git a/rslib/src/browser_table.rs b/rslib/src/browser_table.rs index 4e9803cc1..a2343eb10 100644 --- a/rslib/src/browser_table.rs +++ b/rslib/src/browser_table.rs @@ -14,7 +14,6 @@ use crate::card::CardType; use crate::card_rendering::prettify_av_tags; use crate::notetype::CardTemplate; use crate::notetype::NotetypeKind; -use crate::pb; use crate::prelude::*; use crate::scheduler::timespan::time_span; use crate::scheduler::timing::SchedTimingToday; @@ -182,8 +181,8 @@ impl Column { .into() } - pub fn default_order(self) -> pb::search::browser_columns::Sorting { - use pb::search::browser_columns::Sorting; + pub fn default_order(self) -> anki_proto::search::browser_columns::Sorting { + use anki_proto::search::browser_columns::Sorting; match self { Column::Question | Column::Answer | Column::Custom => Sorting::None, Column::SortField | Column::Tags | Column::Notetype | Column::Deck => { @@ -205,8 +204,8 @@ impl Column { matches!(self, Self::Question | Self::Answer | Self::SortField) } - pub fn alignment(self) -> pb::search::browser_columns::Alignment { - use pb::search::browser_columns::Alignment; + pub fn alignment(self) -> anki_proto::search::browser_columns::Alignment { + use anki_proto::search::browser_columns::Alignment; match self { Self::Question | Self::Answer @@ -221,16 +220,16 @@ impl Column { } impl Collection { - pub fn all_browser_columns(&self) -> pb::search::BrowserColumns { - let mut columns: Vec = Column::iter() + pub fn all_browser_columns(&self) -> anki_proto::search::BrowserColumns { + let mut columns: Vec = Column::iter() .filter(|&c| c != Column::Custom) .map(|c| c.to_pb_column(&self.tr)) .collect(); columns.sort_by(|c1, c2| c1.cards_mode_label.cmp(&c2.cards_mode_label)); - pb::search::BrowserColumns { columns } + anki_proto::search::BrowserColumns { columns } } - pub fn browser_row_for_id(&mut self, id: i64) -> Result { + pub fn browser_row_for_id(&mut self, id: i64) -> Result { let notes_mode = self.get_config_bool(BoolKey::BrowserTableShowNotesMode); let columns = Arc::clone( self.state @@ -361,8 +360,8 @@ impl RowContext { }) } - fn browser_row(&self, columns: &[Column]) -> Result { - Ok(pb::search::BrowserRow { + fn browser_row(&self, columns: &[Column]) -> Result { + Ok(anki_proto::search::BrowserRow { cells: columns .iter() .map(|&column| self.get_cell(column)) @@ -373,8 +372,8 @@ impl RowContext { }) } - fn get_cell(&self, column: Column) -> Result { - Ok(pb::search::browser_row::Cell { + fn get_cell(&self, column: Column) -> Result { + Ok(anki_proto::search::browser_row::Cell { text: self.get_cell_text(column)?, is_rtl: self.get_is_rtl(column), }) @@ -551,8 +550,8 @@ impl RowContext { Ok(self.template()?.config.browser_font_size) } - fn get_row_color(&self) -> pb::search::browser_row::Color { - use pb::search::browser_row::Color; + fn get_row_color(&self) -> anki_proto::search::browser_row::Color { + use anki_proto::search::browser_row::Color; if self.notes_mode { if self.note.is_marked() { Color::Marked diff --git a/rslib/src/card_rendering/mod.rs b/rslib/src/card_rendering/mod.rs index e9a41df8d..350f76afa 100644 --- a/rslib/src/card_rendering/mod.rs +++ 
b/rslib/src/card_rendering/mod.rs @@ -3,7 +3,6 @@ use std::collections::HashMap; -use crate::pb; use crate::prelude::*; mod parser; @@ -20,7 +19,7 @@ pub fn extract_av_tags + AsRef>( txt: S, question_side: bool, tr: &I18n, -) -> (String, Vec) { +) -> (String, Vec) { nodes_or_text_only(txt.as_ref()) .map(|nodes| nodes.write_and_extract_av_tags(question_side, tr)) .unwrap_or_else(|| (txt.into(), vec![])) @@ -125,14 +124,14 @@ mod test { ( "foo [anki:play:q:0] baz [anki:play:q:1]", vec![ - pb::card_rendering::AvTag { - value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo( + anki_proto::card_rendering::AvTag { + value: Some(anki_proto::card_rendering::av_tag::Value::SoundOrVideo( "bar.mp3".to_string() )) }, - pb::card_rendering::AvTag { - value: Some(pb::card_rendering::av_tag::Value::Tts( - pb::card_rendering::TtsTag { + anki_proto::card_rendering::AvTag { + value: Some(anki_proto::card_rendering::av_tag::Value::Tts( + anki_proto::card_rendering::TtsTag { field_text: tr.card_templates_blank().to_string(), lang: "en_US".to_string(), voices: vec![], diff --git a/rslib/src/card_rendering/tts/mod.rs b/rslib/src/card_rendering/tts/mod.rs index 19253c4b7..1477c61b6 100644 --- a/rslib/src/card_rendering/tts/mod.rs +++ b/rslib/src/card_rendering/tts/mod.rs @@ -1,7 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb::card_rendering::all_tts_voices_response::TtsVoice; +use anki_proto::card_rendering::all_tts_voices_response::TtsVoice; + use crate::prelude::*; #[cfg(windows)] diff --git a/rslib/src/card_rendering/tts/other.rs b/rslib/src/card_rendering/tts/other.rs index d68ce5e36..611747659 100644 --- a/rslib/src/card_rendering/tts/other.rs +++ b/rslib/src/card_rendering/tts/other.rs @@ -1,7 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb::card_rendering::all_tts_voices_response::TtsVoice; +use anki_proto::card_rendering::all_tts_voices_response::TtsVoice; + use crate::prelude::*; pub(super) fn all_voices(_validate: bool) -> Result> { diff --git a/rslib/src/card_rendering/tts/windows.rs b/rslib/src/card_rendering/tts/windows.rs index dc30b8921..95cd9026d 100644 --- a/rslib/src/card_rendering/tts/windows.rs +++ b/rslib/src/card_rendering/tts/windows.rs @@ -4,6 +4,7 @@ use std::fs::File; use std::io::Write; +use anki_proto::card_rendering::all_tts_voices_response::TtsVoice; use futures::executor::block_on; use windows::core::HSTRING; use windows::Media::SpeechSynthesis::SpeechSynthesisStream; @@ -13,7 +14,6 @@ use windows::Storage::Streams::DataReader; use crate::error::windows::WindowsErrorDetails; use crate::error::windows::WindowsSnafu; -use crate::pb::card_rendering::all_tts_voices_response::TtsVoice; use crate::prelude::*; const MAX_BUFFER_SIZE: usize = 128 * 1024; diff --git a/rslib/src/card_rendering/writer.rs b/rslib/src/card_rendering/writer.rs index 0695eb131..594d25fd5 100644 --- a/rslib/src/card_rendering/writer.rs +++ b/rslib/src/card_rendering/writer.rs @@ -8,7 +8,6 @@ use super::Directive; use super::Node; use super::OtherDirective; use super::TtsDirective; -use crate::pb; use crate::prelude::*; use crate::text::decode_entities; use crate::text::strip_html_for_tts; @@ -22,7 +21,7 @@ impl<'a> CardNodes<'a> { &self, question_side: bool, tr: &I18n, - ) -> (String, Vec) { + ) -> (String, Vec) { let mut extractor = AvExtractor::new(question_side, tr); (extractor.write(self), 
extractor.tags) } @@ -122,7 +121,7 @@ impl Write for AvStripper { struct AvExtractor<'a> { side: char, - tags: Vec, + tags: Vec, tr: &'a I18n, } @@ -150,8 +149,8 @@ impl<'a> AvExtractor<'a> { impl Write for AvExtractor<'_> { fn write_sound(&mut self, buf: &mut String, resource: &str) { self.write_play_tag(buf); - self.tags.push(pb::card_rendering::AvTag { - value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo( + self.tags.push(anki_proto::card_rendering::AvTag { + value: Some(anki_proto::card_rendering::av_tag::Value::SoundOrVideo( decode_entities(resource).into(), )), }); @@ -164,9 +163,9 @@ impl Write for AvExtractor<'_> { } self.write_play_tag(buf); - self.tags.push(pb::card_rendering::AvTag { - value: Some(pb::card_rendering::av_tag::Value::Tts( - pb::card_rendering::TtsTag { + self.tags.push(anki_proto::card_rendering::AvTag { + value: Some(anki_proto::card_rendering::av_tag::Value::Tts( + anki_proto::card_rendering::TtsTag { field_text: self.transform_tts_content(directive), lang: directive.lang.into(), voices: directive.voices.iter().map(ToString::to_string).collect(), diff --git a/rslib/src/collection/backup.rs b/rslib/src/collection/backup.rs index f0b5e9897..fd061f301 100644 --- a/rslib/src/collection/backup.rs +++ b/rslib/src/collection/backup.rs @@ -11,13 +11,13 @@ use std::thread; use std::thread::JoinHandle; use std::time::SystemTime; +use anki_proto::config::preferences::BackupLimits; use chrono::prelude::*; use itertools::Itertools; use tracing::error; use crate::import_export::package::export_colpkg_from_data; use crate::io::read_locked_db_file; -use crate::pb::config::preferences::BackupLimits; use crate::prelude::*; const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg"; diff --git a/rslib/src/collection/mod.rs b/rslib/src/collection/mod.rs index 6f3e859ca..17d43f25c 100644 --- a/rslib/src/collection/mod.rs +++ b/rslib/src/collection/mod.rs @@ -12,11 +12,12 @@ use std::fmt::Formatter; use std::path::PathBuf; use std::sync::Arc; +use anki_i18n::I18n; + use crate::browser_table; use crate::decks::Deck; use crate::decks::DeckId; use crate::error::Result; -use crate::i18n::I18n; use crate::io::create_dir_all; use crate::notetype::Notetype; use crate::notetype::NotetypeId; diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index ac03e0457..82bf75fdc 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -1,8 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use serde::Deserialize; use serde_aux::field_attributes::deserialize_bool_from_anything; -use serde_derive::Deserialize; use strum::IntoStaticStr; use crate::prelude::*; diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs index 396bf7ad3..b7023560e 100644 --- a/rslib/src/config/mod.rs +++ b/rslib/src/config/mod.rs @@ -9,6 +9,7 @@ pub(crate) mod schema11; mod string; pub(crate) mod undo; +use anki_proto::config::preferences::BackupLimits; use serde::de::DeserializeOwned; use serde::Serialize; use serde_repr::Deserialize_repr; @@ -20,7 +21,6 @@ pub use self::deck::DeckConfigKey; pub use self::notetype::get_aux_notetype_config_key; pub use self::number::I32ConfigKey; pub use self::string::StringKey; -use crate::pb::config::preferences::BackupLimits; use crate::prelude::*; /// Only used when updating/undoing. 
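
The change repeated throughout these hunks follows one mechanical rule: now that the generated protobuf types live in the separate anki_proto crate rather than in rslib's own pb module, rslib can no longer attach inherent impl blocks to them. Each former method on a generated type therefore becomes either a free function (id_list_to_string, string_list_to_browser_columns, ensure_deck_config_values_valid, new_media_entry) or an extension trait (the MetaExt/VersionExt imports in the package code). The sketch below is a minimal stand-in showing both shapes: the local proto module merely takes the place of the external anki_proto crate, and IdListExt is a hypothetical trait name, not one from the patch.

    // Stand-in for the external proto crate; in the real code this type is
    // generated by prost and lives in anki_proto, so rslib cannot add
    // inherent methods to it.
    mod proto {
        pub struct IdList {
            pub ids: Vec<i64>,
        }
    }

    // Shape 1: a free function replaces the former inherent method
    // (mirrors id_list_to_string in search_node.rs above).
    fn id_list_to_string(list: proto::IdList) -> String {
        list.ids
            .iter()
            .map(|i| i.to_string())
            .collect::<Vec<_>>()
            .join(",")
    }

    // Shape 2: an extension trait keeps method-call syntax at existing
    // call sites (the same idea as MetaExt/VersionExt in the package code).
    trait IdListExt {
        fn to_id_string(&self) -> String;
    }

    impl IdListExt for proto::IdList {
        fn to_id_string(&self) -> String {
            self.ids
                .iter()
                .map(|i| i.to_string())
                .collect::<Vec<_>>()
                .join(",")
        }
    }

    fn main() {
        let list = proto::IdList { ids: vec![1, 2, 3] };
        assert_eq!(list.to_id_string(), "1,2,3");
        assert_eq!(id_list_to_string(list), "1,2,3");
    }

The free-function shape suits helpers with few callers; the extension-trait shape keeps method syntax at existing call sites, which is presumably why the package Meta and Version helpers take that route.
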
diff --git a/rslib/src/dbcheck.rs b/rslib/src/dbcheck.rs index ad0cf81ce..550d3cde1 100644 --- a/rslib/src/dbcheck.rs +++ b/rslib/src/dbcheck.rs @@ -4,6 +4,7 @@ use std::collections::HashSet; use std::sync::Arc; +use anki_i18n::I18n; use itertools::Itertools; use tracing::debug; @@ -13,7 +14,6 @@ use crate::error::AnkiError; use crate::error::DbError; use crate::error::DbErrorKind; use crate::error::Result; -use crate::i18n::I18n; use crate::notetype::all_stock_notetypes; use crate::notetype::AlreadyGeneratedCardInfo; use crate::notetype::CardGenContext; diff --git a/rslib/src/deckconfig/mod.rs b/rslib/src/deckconfig/mod.rs index ad7ac0605..785f8085b 100644 --- a/rslib/src/deckconfig/mod.rs +++ b/rslib/src/deckconfig/mod.rs @@ -5,18 +5,17 @@ mod schema11; pub(crate) mod undo; mod update; +pub use anki_proto::deckconfig::deck_config::config::LeechAction; +pub use anki_proto::deckconfig::deck_config::config::NewCardGatherPriority; +pub use anki_proto::deckconfig::deck_config::config::NewCardInsertOrder; +pub use anki_proto::deckconfig::deck_config::config::NewCardSortOrder; +pub use anki_proto::deckconfig::deck_config::config::ReviewCardOrder; +pub use anki_proto::deckconfig::deck_config::config::ReviewMix; +pub use anki_proto::deckconfig::deck_config::Config as DeckConfigInner; pub use schema11::DeckConfSchema11; pub use schema11::NewCardOrderSchema11; pub use update::UpdateDeckConfigsRequest; -pub use crate::pb::deckconfig::deck_config::config::LeechAction; -pub use crate::pb::deckconfig::deck_config::config::NewCardGatherPriority; -pub use crate::pb::deckconfig::deck_config::config::NewCardInsertOrder; -pub use crate::pb::deckconfig::deck_config::config::NewCardSortOrder; -pub use crate::pb::deckconfig::deck_config::config::ReviewCardOrder; -pub use crate::pb::deckconfig::deck_config::config::ReviewMix; -pub use crate::pb::deckconfig::deck_config::Config as DeckConfigInner; - /// Old deck config and cards table store 250% as 2500. pub(crate) const INITIAL_EASE_FACTOR_THOUSANDS: u16 = (INITIAL_EASE_FACTOR * 1000.0) as u16; @@ -186,67 +185,85 @@ impl Collection { } } -impl DeckConfigInner { - /// There was a period of time when the deck options screen was allowing - /// 0/NaN to be persisted, so we need to check the values are within - /// valid bounds when reading from the DB. 
- pub(crate) fn ensure_values_valid(&mut self) { - let default = DEFAULT_DECK_CONFIG_INNER; - ensure_u32_valid(&mut self.new_per_day, default.new_per_day, 0, 9999); - ensure_u32_valid(&mut self.reviews_per_day, default.reviews_per_day, 0, 9999); - ensure_u32_valid( - &mut self.new_per_day_minimum, - default.new_per_day_minimum, - 0, - 9999, - ); - ensure_f32_valid(&mut self.initial_ease, default.initial_ease, 1.31, 5.0); - ensure_f32_valid(&mut self.easy_multiplier, default.easy_multiplier, 1.0, 5.0); - ensure_f32_valid(&mut self.hard_multiplier, default.hard_multiplier, 0.5, 1.3); - ensure_f32_valid( - &mut self.lapse_multiplier, - default.lapse_multiplier, - 0.0, - 1.0, - ); - ensure_f32_valid( - &mut self.interval_multiplier, - default.interval_multiplier, - 0.5, - 2.0, - ); - ensure_u32_valid( - &mut self.maximum_review_interval, - default.maximum_review_interval, - 1, - 36_500, - ); - ensure_u32_valid( - &mut self.minimum_lapse_interval, - default.minimum_lapse_interval, - 1, - 36_500, - ); - ensure_u32_valid( - &mut self.graduating_interval_good, - default.graduating_interval_good, - 1, - 36_500, - ); - ensure_u32_valid( - &mut self.graduating_interval_easy, - default.graduating_interval_easy, - 1, - 36_500, - ); - ensure_u32_valid(&mut self.leech_threshold, default.leech_threshold, 1, 9999); - ensure_u32_valid( - &mut self.cap_answer_time_to_secs, - default.cap_answer_time_to_secs, - 1, - 9999, - ); - } +/// There was a period of time when the deck options screen was allowing +/// 0/NaN to be persisted, so we need to check the values are within +/// valid bounds when reading from the DB. +pub(crate) fn ensure_deck_config_values_valid(config: &mut DeckConfigInner) { + let default = DEFAULT_DECK_CONFIG_INNER; + ensure_u32_valid(&mut config.new_per_day, default.new_per_day, 0, 9999); + ensure_u32_valid( + &mut config.reviews_per_day, + default.reviews_per_day, + 0, + 9999, + ); + ensure_u32_valid( + &mut config.new_per_day_minimum, + default.new_per_day_minimum, + 0, + 9999, + ); + ensure_f32_valid(&mut config.initial_ease, default.initial_ease, 1.31, 5.0); + ensure_f32_valid( + &mut config.easy_multiplier, + default.easy_multiplier, + 1.0, + 5.0, + ); + ensure_f32_valid( + &mut config.hard_multiplier, + default.hard_multiplier, + 0.5, + 1.3, + ); + ensure_f32_valid( + &mut config.lapse_multiplier, + default.lapse_multiplier, + 0.0, + 1.0, + ); + ensure_f32_valid( + &mut config.interval_multiplier, + default.interval_multiplier, + 0.5, + 2.0, + ); + ensure_u32_valid( + &mut config.maximum_review_interval, + default.maximum_review_interval, + 1, + 36_500, + ); + ensure_u32_valid( + &mut config.minimum_lapse_interval, + default.minimum_lapse_interval, + 1, + 36_500, + ); + ensure_u32_valid( + &mut config.graduating_interval_good, + default.graduating_interval_good, + 1, + 36_500, + ); + ensure_u32_valid( + &mut config.graduating_interval_easy, + default.graduating_interval_easy, + 1, + 36_500, + ); + ensure_u32_valid( + &mut config.leech_threshold, + default.leech_threshold, + 1, + 9999, + ); + ensure_u32_valid( + &mut config.cap_answer_time_to_secs, + default.cap_answer_time_to_secs, + 1, + 9999, + ); } fn ensure_f32_valid(val: &mut f32, default: f32, min: f32, max: f32) { diff --git a/rslib/src/deckconfig/schema11.rs b/rslib/src/deckconfig/schema11.rs index 1d713d1f9..856fbbe81 100644 --- a/rslib/src/deckconfig/schema11.rs +++ b/rslib/src/deckconfig/schema11.rs @@ -4,10 +4,10 @@ use std::collections::HashMap; use serde::Deserialize as DeTrait; +use serde::Deserialize; use 
serde::Deserializer; +use serde::Serialize; use serde_aux::field_attributes::deserialize_number_from_string; -use serde_derive::Deserialize; -use serde_derive::Serialize; use serde_json::Value; use serde_repr::Deserialize_repr; use serde_repr::Serialize_repr; diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs index ef79d7450..dbd5b92e7 100644 --- a/rslib/src/deckconfig/update.rs +++ b/rslib/src/deckconfig/update.rs @@ -7,13 +7,13 @@ use std::collections::HashMap; use std::collections::HashSet; use std::iter; +use anki_proto::deckconfig::deck_configs_for_update::current_deck::Limits; +use anki_proto::deckconfig::deck_configs_for_update::ConfigWithExtra; +use anki_proto::deckconfig::deck_configs_for_update::CurrentDeck; +use anki_proto::decks::deck::normal::DayLimit; + use crate::config::StringKey; use crate::decks::NormalDeck; -use crate::pb; -use crate::pb::deckconfig::deck_configs_for_update::current_deck::Limits; -use crate::pb::deckconfig::deck_configs_for_update::ConfigWithExtra; -use crate::pb::deckconfig::deck_configs_for_update::CurrentDeck; -use crate::pb::decks::deck::normal::DayLimit; use crate::prelude::*; use crate::search::JoinSearches; use crate::search::SearchNode; @@ -35,8 +35,8 @@ impl Collection { pub fn get_deck_configs_for_update( &mut self, deck: DeckId, - ) -> Result { - Ok(pb::deckconfig::DeckConfigsForUpdate { + ) -> Result { + Ok(anki_proto::deckconfig::DeckConfigsForUpdate { all_config: self.get_deck_config_with_extra_for_update()?, current_deck: Some(self.get_current_deck_for_update(deck)?), defaults: Some(DeckConfig::default().into()), @@ -99,7 +99,7 @@ impl Collection { .into_iter() .map(Into::into) .collect(), - limits: Some(normal.to_limits(today)), + limits: Some(normal_deck_to_limits(normal, today)), }) } @@ -172,7 +172,7 @@ impl Collection { { let mut updated = deck.clone(); updated.normal_mut()?.config_id = selected_config.id.0; - updated.normal_mut()?.update_limits(&input.limits, today); + update_deck_limits(updated.normal_mut()?, &input.limits, today); self.update_deck_inner(&mut updated, deck, usn)?; selected_config.id } else { @@ -236,30 +236,28 @@ impl Collection { } } -impl NormalDeck { - fn to_limits(&self, today: u32) -> Limits { - Limits { - review: self.review_limit, - new: self.new_limit, - review_today: self.review_limit_today.map(|limit| limit.limit), - new_today: self.new_limit_today.map(|limit| limit.limit), - review_today_active: self - .review_limit_today - .map(|limit| limit.today == today) - .unwrap_or_default(), - new_today_active: self - .new_limit_today - .map(|limit| limit.today == today) - .unwrap_or_default(), - } +fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits { + Limits { + review: deck.review_limit, + new: deck.new_limit, + review_today: deck.review_limit_today.map(|limit| limit.limit), + new_today: deck.new_limit_today.map(|limit| limit.limit), + review_today_active: deck + .review_limit_today + .map(|limit| limit.today == today) + .unwrap_or_default(), + new_today_active: deck + .new_limit_today + .map(|limit| limit.today == today) + .unwrap_or_default(), } +} - fn update_limits(&mut self, limits: &Limits, today: u32) { - self.review_limit = limits.review; - self.new_limit = limits.new; - update_day_limit(&mut self.review_limit_today, limits.review_today, today); - update_day_limit(&mut self.new_limit_today, limits.new_today, today); - } +fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) { + deck.review_limit = limits.review; + deck.new_limit = limits.new; + 
update_day_limit(&mut deck.review_limit_today, limits.review_today, today); + update_day_limit(&mut deck.new_limit_today, limits.new_today, today); } fn update_day_limit(day_limit: &mut Option, new_limit: Option, today: u32) { diff --git a/rslib/src/decks/counts.rs b/rslib/src/decks/counts.rs index 2f2f2997f..8cd55c115 100644 --- a/rslib/src/decks/counts.rs +++ b/rslib/src/decks/counts.rs @@ -2,7 +2,6 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::collections::HashMap; -use crate::pb; use crate::prelude::*; #[derive(Debug)] @@ -43,11 +42,11 @@ impl Collection { pub(crate) fn counts_for_deck_today( &mut self, did: DeckId, - ) -> Result { + ) -> Result { let today = self.current_due_day(0)?; let mut deck = self.storage.get_deck(did)?.or_not_found(did)?; deck.reset_stats_if_day_changed(today); - Ok(pb::scheduler::CountsForDeckTodayResponse { + Ok(anki_proto::scheduler::CountsForDeckTodayResponse { new: deck.common.new_studied, review: deck.common.review_studied, }) diff --git a/rslib/src/decks/filtered.rs b/rslib/src/decks/filtered.rs index e0d083afc..c9dfad09c 100644 --- a/rslib/src/decks/filtered.rs +++ b/rslib/src/decks/filtered.rs @@ -43,23 +43,23 @@ impl Deck { } } -impl FilteredSearchOrder { - pub fn labels(tr: &I18n) -> Vec { - FilteredSearchOrder::iter().map(|v| v.label(tr)).collect() - } - - fn label(self, tr: &I18n) -> String { - match self { - FilteredSearchOrder::OldestReviewedFirst => tr.decks_oldest_seen_first(), - FilteredSearchOrder::Random => tr.decks_random(), - FilteredSearchOrder::IntervalsAscending => tr.decks_increasing_intervals(), - FilteredSearchOrder::IntervalsDescending => tr.decks_decreasing_intervals(), - FilteredSearchOrder::Lapses => tr.decks_most_lapses(), - FilteredSearchOrder::Added => tr.decks_order_added(), - FilteredSearchOrder::Due => tr.decks_order_due(), - FilteredSearchOrder::ReverseAdded => tr.decks_latest_added_first(), - FilteredSearchOrder::DuePriority => tr.decks_relative_overdueness(), - } - .into() - } +pub fn search_order_labels(tr: &I18n) -> Vec { + FilteredSearchOrder::iter() + .map(|v| search_order_label(v, tr)) + .collect() +} + +fn search_order_label(order: FilteredSearchOrder, tr: &I18n) -> String { + match order { + FilteredSearchOrder::OldestReviewedFirst => tr.decks_oldest_seen_first(), + FilteredSearchOrder::Random => tr.decks_random(), + FilteredSearchOrder::IntervalsAscending => tr.decks_increasing_intervals(), + FilteredSearchOrder::IntervalsDescending => tr.decks_decreasing_intervals(), + FilteredSearchOrder::Lapses => tr.decks_most_lapses(), + FilteredSearchOrder::Added => tr.decks_order_added(), + FilteredSearchOrder::Due => tr.decks_order_due(), + FilteredSearchOrder::ReverseAdded => tr.decks_latest_added_first(), + FilteredSearchOrder::DuePriority => tr.decks_relative_overdueness(), + } + .into() } diff --git a/rslib/src/decks/limits.rs b/rslib/src/decks/limits.rs index 7f6a93d50..ec39a9c62 100644 --- a/rslib/src/decks/limits.rs +++ b/rslib/src/decks/limits.rs @@ -4,6 +4,7 @@ use std::collections::HashMap; use std::iter::Peekable; +use anki_proto::decks::deck::normal::DayLimit; use id_tree::InsertBehavior; use id_tree::Node; use id_tree::NodeId; @@ -13,7 +14,6 @@ use super::Deck; use super::NormalDeck; use crate::deckconfig::DeckConfig; use crate::deckconfig::DeckConfigId; -use crate::pb::decks::deck::normal::DayLimit; use crate::prelude::*; #[derive(Debug, Clone, Copy)] @@ -22,36 +22,32 @@ pub(crate) enum LimitKind { New, } -impl NormalDeck { - /// The deck's review limit for 
today, or its regular one, if any is - /// configured. - pub fn current_review_limit(&self, today: u32) -> Option { - self.review_limit_today(today).or(self.review_limit) - } - - /// The deck's new limit for today, or its regular one, if any is - /// configured. - pub fn current_new_limit(&self, today: u32) -> Option { - self.new_limit_today(today).or(self.new_limit) - } - - /// The deck's review limit for today. - pub fn review_limit_today(&self, today: u32) -> Option { - self.review_limit_today - .and_then(|day_limit| day_limit.limit(today)) - } - - /// The deck's new limit for today. - pub fn new_limit_today(&self, today: u32) -> Option { - self.new_limit_today - .and_then(|day_limit| day_limit.limit(today)) - } +/// The deck's review limit for today, or its regular one, if any is +/// configured. +pub fn current_review_limit(deck: &NormalDeck, today: u32) -> Option { + review_limit_today(deck, today).or(deck.review_limit) } -impl DayLimit { - pub fn limit(&self, today: u32) -> Option { - (self.today == today).then_some(self.limit) - } +/// The deck's new limit for today, or its regular one, if any is +/// configured. +pub fn current_new_limit(deck: &NormalDeck, today: u32) -> Option { + new_limit_today(deck, today).or(deck.new_limit) +} + +/// The deck's review limit for today. +pub fn review_limit_today(deck: &NormalDeck, today: u32) -> Option { + deck.review_limit_today + .and_then(|day_limit| limit_if_today(day_limit, today)) +} + +/// The deck's new limit for today. +pub fn new_limit_today(deck: &NormalDeck, today: u32) -> Option { + deck.new_limit_today + .and_then(|day_limit| limit_if_today(day_limit, today)) +} + +pub fn limit_if_today(limit: DayLimit, today: u32) -> Option { + (limit.today == today).then_some(limit.limit) } #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -118,12 +114,10 @@ impl RemainingLimits { normal: &NormalDeck, config: &DeckConfig, ) -> RemainingLimits { - let mut review_limit = normal - .current_review_limit(today) - .unwrap_or(config.inner.reviews_per_day) as i32; - let mut new_limit = normal - .current_new_limit(today) - .unwrap_or(config.inner.new_per_day) as i32; + let mut review_limit = + current_review_limit(normal, today).unwrap_or(config.inner.reviews_per_day) as i32; + let mut new_limit = + current_new_limit(normal, today).unwrap_or(config.inner.new_per_day) as i32; let (new_today_count, review_today_count) = deck.new_rev_counts(today); review_limit -= review_today_count; diff --git a/rslib/src/decks/mod.rs b/rslib/src/decks/mod.rs index 06bb2f07a..25713092c 100644 --- a/rslib/src/decks/mod.rs +++ b/rslib/src/decks/mod.rs @@ -4,18 +4,26 @@ mod addupdate; mod counts; mod current; -mod filtered; +pub mod filtered; pub(crate) mod limits; mod name; mod remove; mod reparent; mod schema11; mod stats; -mod tree; +pub mod tree; pub(crate) mod undo; use std::sync::Arc; +pub use anki_proto::decks::deck::filtered::search_term::Order as FilteredSearchOrder; +pub use anki_proto::decks::deck::filtered::SearchTerm as FilteredSearchTerm; +pub use anki_proto::decks::deck::kind_container::Kind as DeckKind; +pub use anki_proto::decks::deck::Common as DeckCommon; +pub use anki_proto::decks::deck::Filtered as FilteredDeck; +pub use anki_proto::decks::deck::KindContainer as DeckKindContainer; +pub use anki_proto::decks::deck::Normal as NormalDeck; +pub use anki_proto::decks::Deck as DeckProto; pub(crate) use counts::DueCounts; pub(crate) use name::immediate_parent_name; pub use name::NativeDeckName; @@ -24,14 +32,6 @@ pub use schema11::DeckSchema11; use 
crate::define_newtype; use crate::error::FilteredDeckError; use crate::markdown::render_markdown; -pub use crate::pb::decks::deck::filtered::search_term::Order as FilteredSearchOrder; -pub use crate::pb::decks::deck::filtered::SearchTerm as FilteredSearchTerm; -pub use crate::pb::decks::deck::kind_container::Kind as DeckKind; -pub use crate::pb::decks::deck::Common as DeckCommon; -pub use crate::pb::decks::deck::Filtered as FilteredDeck; -pub use crate::pb::decks::deck::KindContainer as DeckKindContainer; -pub use crate::pb::decks::deck::Normal as NormalDeck; -pub use crate::pb::decks::Deck as DeckProto; use crate::prelude::*; use crate::text::sanitize_html_no_images; diff --git a/rslib/src/decks/schema11.rs b/rslib/src/decks/schema11.rs index e1b87276a..49fc07dd0 100644 --- a/rslib/src/decks/schema11.rs +++ b/rslib/src/decks/schema11.rs @@ -3,8 +3,9 @@ use std::collections::HashMap; -use serde_derive::Deserialize; -use serde_derive::Serialize; +use anki_proto::decks::deck::normal::DayLimit; +use serde::Deserialize; +use serde::Serialize; use serde_json::Value; use serde_tuple::Serialize_tuple; @@ -12,7 +13,6 @@ use super::DeckCommon; use super::FilteredDeck; use super::FilteredSearchTerm; use super::NormalDeck; -use crate::pb::decks::deck::normal::DayLimit; use crate::prelude::*; use crate::serde::default_on_invalid; use crate::serde::deserialize_bool_from_anything; diff --git a/rslib/src/decks/stats.rs b/rslib/src/decks/stats.rs index 502388e7a..ac174b827 100644 --- a/rslib/src/decks/stats.rs +++ b/rslib/src/decks/stats.rs @@ -1,7 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::DeckCommon; -use crate::pb; use crate::prelude::*; impl Deck { @@ -24,7 +23,7 @@ impl Collection { &mut self, today: u32, usn: Usn, - input: pb::scheduler::UpdateStatsRequest, + input: anki_proto::scheduler::UpdateStatsRequest, ) -> Result<()> { let did = input.deck_id.into(); let mutator = |c: &mut DeckCommon| { diff --git a/rslib/src/decks/tree.rs b/rslib/src/decks/tree.rs index d73f18df7..54d59fa3b 100644 --- a/rslib/src/decks/tree.rs +++ b/rslib/src/decks/tree.rs @@ -6,6 +6,8 @@ use std::collections::HashSet; use std::iter::Peekable; use std::ops::AddAssign; +pub use anki_proto::decks::set_deck_collapsed_request::Scope as DeckCollapseScope; +use anki_proto::decks::DeckTreeNode; use serde_tuple::Serialize_tuple; use unicase::UniCase; @@ -14,8 +16,6 @@ use super::limits::RemainingLimits; use super::DueCounts; use crate::config::SchedulerVersion; use crate::ops::OpOutput; -pub use crate::pb::decks::set_deck_collapsed_request::Scope as DeckCollapseScope; -use crate::pb::decks::DeckTreeNode; use crate::prelude::*; use crate::undo::Op; @@ -250,28 +250,29 @@ fn hide_default_deck(node: &mut DeckTreeNode) { } } -impl DeckTreeNode { - /// Locate provided deck in tree, and return it. - pub fn get_deck(self, deck_id: DeckId) -> Option { - if self.deck_id == deck_id.0 { - return Some(self); +/// Locate provided deck in tree, and return it. 
+pub fn get_deck_in_tree(tree: DeckTreeNode, deck_id: DeckId) -> Option { + if tree.deck_id == deck_id.0 { + return Some(tree); + } + for child in tree.children { + if let Some(node) = get_deck_in_tree(child, deck_id) { + return Some(node); } - for child in self.children { - if let Some(node) = child.get_deck(deck_id) { - return Some(node); - } - } - - None } - pub(crate) fn sum(&self, map: fn(&DeckTreeNode) -> T) -> T { - let mut output = map(self); - for child in &self.children { - output += child.sum(map); - } - output + None +} + +pub(crate) fn sum_deck_tree_node( + node: &DeckTreeNode, + map: fn(&DeckTreeNode) -> T, +) -> T { + let mut output = map(node); + for child in &node.children { + output += sum_deck_tree_node(child, map) } + output } #[derive(Serialize_tuple)] @@ -355,7 +356,7 @@ impl Collection { pub fn current_deck_tree(&mut self) -> Result> { let target = self.get_current_deck_id(); let tree = self.deck_tree(Some(TimestampSecs::now()))?; - Ok(tree.get_deck(target)) + Ok(get_deck_in_tree(tree, target)) } pub fn set_deck_collapsed( diff --git a/rslib/src/error/mod.rs b/rslib/src/error/mod.rs index 519fa4241..237ba13da 100644 --- a/rslib/src/error/mod.rs +++ b/rslib/src/error/mod.rs @@ -11,6 +11,8 @@ mod search; #[cfg(windows)] pub mod windows; +use anki_i18n::I18n; +use anki_proto::ProtoError; pub use db::DbError; pub use db::DbErrorKind; pub use filtered::CustomStudyError; @@ -30,7 +32,6 @@ pub use self::invalid_input::InvalidInputError; pub use self::invalid_input::OrInvalid; pub use self::not_found::NotFoundError; pub use self::not_found::OrNotFound; -use crate::i18n::I18n; use crate::import_export::ImportError; use crate::links::HelpPage; @@ -300,3 +301,11 @@ pub enum CardTypeErrorDetails { MissingCloze, ExtraneousCloze, } + +impl From for AnkiError { + fn from(value: ProtoError) -> Self { + AnkiError::ProtoError { + info: value.to_string(), + } + } +} diff --git a/rslib/src/i18n.rs b/rslib/src/i18n.rs deleted file mode 100644 index 452e91d12..000000000 --- a/rslib/src/i18n.rs +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub use anki_i18n::I18n; diff --git a/rslib/src/image_occlusion/imagedata.rs b/rslib/src/image_occlusion/imagedata.rs index bf85448c6..0dfd78303 100644 --- a/rslib/src/image_occlusion/imagedata.rs +++ b/rslib/src/image_occlusion/imagedata.rs @@ -4,16 +4,16 @@ use std::path::Path; use std::path::PathBuf; +use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageClozeNote; +use anki_proto::image_occlusion::get_image_occlusion_note_response::Value; +use anki_proto::image_occlusion::GetImageForOcclusionResponse; +use anki_proto::image_occlusion::GetImageOcclusionNoteResponse; use regex::Regex; use crate::io::metadata; use crate::io::read_file; use crate::media::MediaManager; use crate::notetype::CardGenContext; -use crate::pb::image_occlusion::get_image_occlusion_note_response::ImageClozeNote; -use crate::pb::image_occlusion::get_image_occlusion_note_response::Value; -use crate::pb::image_occlusion::GetImageForOcclusionResponse; -use crate::pb::image_occlusion::GetImageOcclusionNoteResponse; use crate::prelude::*; impl Collection { diff --git a/rslib/src/image_occlusion/notetype.rs b/rslib/src/image_occlusion/notetype.rs index a192e0083..988afcb64 100644 --- a/rslib/src/image_occlusion/notetype.rs +++ b/rslib/src/image_occlusion/notetype.rs @@ -3,10 +3,11 @@ use std::sync::Arc; +use 
anki_proto::notetypes::stock_notetype::OriginalStockKind; + use crate::notetype::stock::empty_stock; use crate::notetype::Notetype; use crate::notetype::NotetypeKind; -use crate::pb::notetypes::stock_notetype::OriginalStockKind; use crate::prelude::*; impl Collection { diff --git a/rslib/src/import_export/mod.rs b/rslib/src/import_export/mod.rs index 94e3c4768..8f86e070c 100644 --- a/rslib/src/import_export/mod.rs +++ b/rslib/src/import_export/mod.rs @@ -8,10 +8,10 @@ pub mod text; use std::marker::PhantomData; +pub use anki_proto::import_export::import_response::Log as NoteLog; +pub use anki_proto::import_export::import_response::Note as LogNote; use snafu::Snafu; -pub use crate::pb::import_export::import_response::Log as NoteLog; -pub use crate::pb::import_export::import_response::Note as LogNote; use crate::prelude::*; use crate::text::newlines_to_spaces; use crate::text::strip_html_preserving_media_filenames; @@ -114,7 +114,7 @@ impl<'f, F: 'f + FnMut(usize) -> Result<()>> Incrementor<'f, F> { impl Note { pub(crate) fn into_log_note(self) -> LogNote { LogNote { - id: Some(self.id.into()), + id: Some(anki_proto::notes::NoteId { nid: self.id.0 }), fields: self .into_fields() .into_iter() diff --git a/rslib/src/import_export/package/apkg/export.rs b/rslib/src/import_export/package/apkg/export.rs index e20d5d762..114fe242d 100644 --- a/rslib/src/import_export/package/apkg/export.rs +++ b/rslib/src/import_export/package/apkg/export.rs @@ -5,6 +5,7 @@ use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; +use super::super::meta::MetaExt; use crate::collection::CollectionBuilder; use crate::import_export::gather::ExchangeData; use crate::import_export::package::colpkg::export::export_collection; diff --git a/rslib/src/import_export/package/apkg/import/decks.rs b/rslib/src/import_export/package/apkg/import/decks.rs index 19b129954..e0ee3b439 100644 --- a/rslib/src/import_export/package/apkg/import/decks.rs +++ b/rslib/src/import_export/package/apkg/import/decks.rs @@ -141,7 +141,7 @@ impl DeckContext<'_> { fn update_deck(&mut self, deck: &Deck, original: Deck) -> Result<()> { let mut new_deck = original.clone(); if let (Ok(new), Ok(old)) = (new_deck.normal_mut(), deck.normal()) { - new.update_with_other(old); + update_normal_with_other(new, old); } else if let (Ok(new), Ok(old)) = (new_deck.filtered_mut(), deck.filtered()) { *new = old.clone(); } else { @@ -188,20 +188,18 @@ impl Deck { } } -impl NormalDeck { - fn update_with_other(&mut self, other: &Self) { - if !other.description.is_empty() { - self.markdown_description = other.markdown_description; - self.description = other.description.clone(); - } - if other.config_id != 1 { - self.config_id = other.config_id; - } - self.review_limit = other.review_limit.or(self.review_limit); - self.new_limit = other.new_limit.or(self.new_limit); - self.review_limit_today = other.review_limit_today.or(self.review_limit_today); - self.new_limit_today = other.new_limit_today.or(self.new_limit_today); +fn update_normal_with_other(normal: &mut NormalDeck, other: &NormalDeck) { + if !other.description.is_empty() { + normal.markdown_description = other.markdown_description; + normal.description = other.description.clone(); } + if other.config_id != 1 { + normal.config_id = other.config_id; + } + normal.review_limit = other.review_limit.or(normal.review_limit); + normal.new_limit = other.new_limit.or(normal.new_limit); + normal.review_limit_today = other.review_limit_today.or(normal.review_limit_today); + normal.new_limit_today = 
other.new_limit_today.or(normal.new_limit_today); } #[cfg(test)] diff --git a/rslib/src/import_export/package/apkg/import/media.rs b/rslib/src/import_export/package/apkg/import/media.rs index c1d0bfad3..04221b30f 100644 --- a/rslib/src/import_export/package/apkg/import/media.rs +++ b/rslib/src/import_export/package/apkg/import/media.rs @@ -7,6 +7,7 @@ use std::mem; use zip::ZipArchive; +use super::super::super::meta::MetaExt; use super::Context; use crate::error::FileIoSnafu; use crate::error::FileOp; diff --git a/rslib/src/import_export/package/apkg/import/mod.rs b/rslib/src/import_export/package/apkg/import/mod.rs index 18ca39d82..cebee9b69 100644 --- a/rslib/src/import_export/package/apkg/import/mod.rs +++ b/rslib/src/import_export/package/apkg/import/mod.rs @@ -15,6 +15,7 @@ use rusqlite::OptionalExtension; use tempfile::NamedTempFile; use zip::ZipArchive; +use super::super::meta::MetaExt; use crate::collection::CollectionBuilder; use crate::error::FileIoSnafu; use crate::error::FileOp; diff --git a/rslib/src/import_export/package/colpkg/export.rs b/rslib/src/import_export/package/colpkg/export.rs index be23499f0..a504c03a3 100644 --- a/rslib/src/import_export/package/colpkg/export.rs +++ b/rslib/src/import_export/package/colpkg/export.rs @@ -18,11 +18,14 @@ use zstd::stream::raw::Encoder as RawEncoder; use zstd::stream::zio; use zstd::Encoder; +use super::super::meta::MetaExt; +use super::super::meta::VersionExt; use super::super::MediaEntries; use super::super::MediaEntry; use super::super::Meta; use super::super::Version; use crate::collection::CollectionBuilder; +use crate::import_export::package::media::new_media_entry; use crate::import_export::package::media::MediaCopier; use crate::import_export::package::media::MediaIter; use crate::import_export::ExportProgress; @@ -269,7 +272,7 @@ fn write_media_files( zip.start_file(index.to_string(), file_options_stored())?; let (size, sha1) = copier.copy(&mut entry.data, zip)?; - media_entries.push(MediaEntry::new(entry.nfc_filename, size, sha1)); + media_entries.push(new_media_entry(entry.nfc_filename, size, sha1)); } Ok(()) diff --git a/rslib/src/import_export/package/colpkg/import.rs b/rslib/src/import_export/package/colpkg/import.rs index 75b25c224..c306de87b 100644 --- a/rslib/src/import_export/package/colpkg/import.rs +++ b/rslib/src/import_export/package/colpkg/import.rs @@ -11,6 +11,7 @@ use zip::read::ZipFile; use zip::ZipArchive; use zstd::stream::copy_decode; +use super::super::meta::MetaExt; use crate::collection::CollectionBuilder; use crate::error::FileIoSnafu; use crate::error::FileOp; diff --git a/rslib/src/import_export/package/media.rs b/rslib/src/import_export/package/media.rs index 1f403165b..2df102ec3 100644 --- a/rslib/src/import_export/package/media.rs +++ b/rslib/src/import_export/package/media.rs @@ -20,6 +20,7 @@ use zip::ZipArchive; use zstd::stream::copy_decode; use zstd::stream::raw::Encoder as RawEncoder; +use super::meta::MetaExt; use super::MediaEntries; use super::MediaEntry; use super::Meta; @@ -44,18 +45,16 @@ pub(super) struct SafeMediaEntry { pub(super) index: usize, } -impl MediaEntry { - pub(super) fn new( - name: impl Into, - size: impl TryInto, - sha1: impl Into>, - ) -> Self { - MediaEntry { - name: name.into(), - size: size.try_into().unwrap_or_default(), - sha1: sha1.into(), - legacy_zip_filename: None, - } +pub(super) fn new_media_entry( + name: impl Into, + size: impl TryInto, + sha1: impl Into>, +) -> MediaEntry { + MediaEntry { + name: name.into(), + size: 
size.try_into().unwrap_or_default(), + sha1: sha1.into(), + legacy_zip_filename: None, } } @@ -146,7 +145,7 @@ pub(super) fn extract_media_entries( let map: HashMap<&str, String> = serde_json::from_slice(&media_list_data)?; map.into_iter().map(SafeMediaEntry::from_legacy).collect() } else { - MediaEntries::decode_safe_entries(&media_list_data) + decode_safe_entries(&media_list_data) } } @@ -171,16 +170,14 @@ fn get_media_list_data(archive: &mut ZipArchive, meta: &Meta) -> Result Result> { - let entries: Self = Message::decode(buf)?; - entries - .entries - .into_iter() - .enumerate() - .map(SafeMediaEntry::from_entry) - .collect() - } +pub(super) fn decode_safe_entries(buf: &[u8]) -> Result> { + let entries: MediaEntries = Message::decode(buf)?; + entries + .entries + .into_iter() + .enumerate() + .map(SafeMediaEntry::from_entry) + .collect() } pub struct MediaIterEntry { @@ -353,10 +350,10 @@ mod test { // new-style entries should have been normalized on export let mut entries = Vec::new(); MediaEntries { - entries: vec![MediaEntry::new("con", 0, Vec::new())], + entries: vec![new_media_entry("con", 0, Vec::new())], } .encode(&mut entries) .unwrap(); - assert!(MediaEntries::decode_safe_entries(&entries).is_err()); + assert!(decode_safe_entries(&entries).is_err()); } } diff --git a/rslib/src/import_export/package/meta.rs b/rslib/src/import_export/package/meta.rs index 7d0ba36bd..1b8797471 100644 --- a/rslib/src/import_export/package/meta.rs +++ b/rslib/src/import_export/package/meta.rs @@ -5,18 +5,23 @@ use std::fs::File; use std::io; use std::io::Read; +pub(super) use anki_proto::import_export::package_metadata::Version; +pub(super) use anki_proto::import_export::PackageMetadata as Meta; use prost::Message; use zip::ZipArchive; use zstd::stream::copy_decode; use crate::import_export::ImportError; -pub(super) use crate::pb::import_export::package_metadata::Version; -pub(super) use crate::pb::import_export::PackageMetadata as Meta; use crate::prelude::*; use crate::storage::SchemaVersion; -impl Version { - pub(super) fn collection_filename(&self) -> &'static str { +pub(super) trait VersionExt { + fn collection_filename(&self) -> &'static str; + fn schema_version(&self) -> SchemaVersion; +} + +impl VersionExt for Version { + fn collection_filename(&self) -> &'static str { match self { Version::Unknown => unreachable!(), Version::Legacy1 => "collection.anki2", @@ -27,7 +32,7 @@ impl Version { /// Latest schema version that is supported by all clients supporting /// this package version. 
- pub(super) fn schema_version(&self) -> SchemaVersion { + fn schema_version(&self) -> SchemaVersion { match self { Version::Unknown => unreachable!(), Version::Legacy1 | Version::Legacy2 => SchemaVersion::V11, @@ -36,14 +41,26 @@ impl Version { } } -impl Meta { - pub(super) fn new() -> Self { +pub(in crate::import_export) trait MetaExt: Sized { + fn new() -> Self; + fn new_legacy() -> Self; + fn from_archive(archive: &mut ZipArchive) -> Result; + fn collection_filename(&self) -> &'static str; + fn schema_version(&self) -> SchemaVersion; + fn zstd_compressed(&self) -> bool; + fn media_list_is_hashmap(&self) -> bool; + fn is_legacy(&self) -> bool; + fn copy(&self, reader: &mut impl Read, writer: &mut impl io::Write) -> io::Result<()>; +} + +impl MetaExt for Meta { + fn new() -> Self { Self { version: Version::Latest as i32, } } - pub(super) fn new_legacy() -> Self { + fn new_legacy() -> Self { Self { version: Version::Legacy2 as i32, } @@ -51,7 +68,7 @@ impl Meta { /// Extracts meta data from an archive and checks if its version is /// supported. - pub(super) fn from_archive(archive: &mut ZipArchive) -> Result { + fn from_archive(archive: &mut ZipArchive) -> Result { let meta_bytes = archive.by_name("meta").ok().and_then(|mut meta_file| { let mut buf = vec![]; meta_file.read_to_end(&mut buf).ok()?; @@ -77,21 +94,21 @@ impl Meta { Ok(meta) } - pub(super) fn collection_filename(&self) -> &'static str { + fn collection_filename(&self) -> &'static str { self.version().collection_filename() } /// Latest schema version that is supported by all clients supporting /// this package version. - pub(super) fn schema_version(&self) -> SchemaVersion { + fn schema_version(&self) -> SchemaVersion { self.version().schema_version() } - pub(super) fn zstd_compressed(&self) -> bool { + fn zstd_compressed(&self) -> bool { !self.is_legacy() } - pub(super) fn media_list_is_hashmap(&self) -> bool { + fn media_list_is_hashmap(&self) -> bool { self.is_legacy() } @@ -99,11 +116,7 @@ impl Meta { matches!(self.version(), Version::Legacy1 | Version::Legacy2) } - pub(super) fn copy( - &self, - reader: &mut impl Read, - writer: &mut impl io::Write, - ) -> io::Result<()> { + fn copy(&self, reader: &mut impl Read, writer: &mut impl io::Write) -> io::Result<()> { if self.zstd_compressed() { copy_decode(reader, writer) } else { diff --git a/rslib/src/import_export/package/mod.rs b/rslib/src/import_export/package/mod.rs index b9da55fe0..4e7cfc0e2 100644 --- a/rslib/src/import_export/package/mod.rs +++ b/rslib/src/import_export/package/mod.rs @@ -6,6 +6,8 @@ mod colpkg; mod media; mod meta; +pub(self) use anki_proto::import_export::media_entries::MediaEntry; +pub(self) use anki_proto::import_export::MediaEntries; pub(crate) use apkg::NoteMeta; pub(crate) use colpkg::export::export_colpkg_from_data; pub use colpkg::import::import_colpkg; @@ -14,6 +16,3 @@ pub use media::MediaIterEntry; pub use media::MediaIterError; pub(self) use meta::Meta; pub(self) use meta::Version; - -pub(self) use crate::pb::import_export::media_entries::MediaEntry; -pub(self) use crate::pb::import_export::MediaEntries; diff --git a/rslib/src/import_export/text/csv/export.rs b/rslib/src/import_export/text/csv/export.rs index 142b9a34b..656698b6d 100644 --- a/rslib/src/import_export/text/csv/export.rs +++ b/rslib/src/import_export/text/csv/export.rs @@ -7,15 +7,16 @@ use std::fs::File; use std::io::Write; use std::sync::Arc; +use anki_proto::import_export::ExportNoteCsvRequest; use itertools::Itertools; use lazy_static::lazy_static; use regex::Regex; use 
super::metadata::Delimiter; +use crate::import_export::text::csv::metadata::DelimeterExt; use crate::import_export::ExportProgress; use crate::import_export::IncrementableProgress; use crate::notetype::RenderCardOutput; -use crate::pb::import_export::ExportNoteCsvRequest; use crate::prelude::*; use crate::search::SearchNode; use crate::search::SortMode; @@ -60,7 +61,7 @@ impl Collection { progress.call(ExportProgress::File)?; let mut incrementor = progress.incrementor(ExportProgress::Notes); - let guard = self.search_notes_into_table(request.search_node())?; + let guard = self.search_notes_into_table(Into::::into(&mut request))?; let ctx = NoteContext::new(&request, guard.col)?; let mut writer = note_file_writer_with_header(&request.out_path, &ctx)?; guard.col.storage.for_each_note_in_search(|note| { @@ -283,8 +284,8 @@ impl NoteContext { } } -impl ExportNoteCsvRequest { - fn search_node(&mut self) -> SearchNode { - SearchNode::from(self.limit.take().unwrap_or_default()) +impl From<&mut ExportNoteCsvRequest> for SearchNode { + fn from(req: &mut ExportNoteCsvRequest) -> Self { + SearchNode::from(req.limit.take().unwrap_or_default()) } } diff --git a/rslib/src/import_export/text/csv/import.rs b/rslib/src/import_export/text/csv/import.rs index 9ddd84aa9..0931ecfaf 100644 --- a/rslib/src/import_export/text/csv/import.rs +++ b/rslib/src/import_export/text/csv/import.rs @@ -9,7 +9,9 @@ use std::io::SeekFrom; use crate::import_export::text::csv::metadata::CsvDeck; use crate::import_export::text::csv::metadata::CsvMetadata; +use crate::import_export::text::csv::metadata::CsvMetadataHelpers; use crate::import_export::text::csv::metadata::CsvNotetype; +use crate::import_export::text::csv::metadata::DelimeterExt; use crate::import_export::text::csv::metadata::Delimiter; use crate::import_export::text::ForeignData; use crate::import_export::text::ForeignNote; @@ -53,34 +55,12 @@ impl From for ForeignData { } } -impl CsvMetadata { - fn deck(&self) -> Result<&CsvDeck> { - self.deck.as_ref().or_invalid("deck oneof not set") - } - - fn notetype(&self) -> Result<&CsvNotetype> { - self.notetype.as_ref().or_invalid("notetype oneof not set") - } - - fn field_source_columns(&self) -> Result { - Ok(match self.notetype()? { - CsvNotetype::GlobalNotetype(global) => global - .field_columns - .iter() - .map(|&i| (i > 0).then_some(i as usize)) - .collect(), - CsvNotetype::NotetypeColumn(_) => { - let meta_columns = self.meta_columns(); - (1..self.column_labels.len() + 1) - .filter(|idx| !meta_columns.contains(idx)) - .map(Some) - .collect() - } - }) - } +trait CsvDeckExt { + fn name_or_id(&self) -> NameOrId; + fn column(&self) -> Option; } -impl CsvDeck { +impl CsvDeckExt for CsvDeck { fn name_or_id(&self) -> NameOrId { match self { Self::DeckId(did) => NameOrId::Id(*did), @@ -96,7 +76,12 @@ impl CsvDeck { } } -impl CsvNotetype { +trait CsvNotetypeExt { + fn name_or_id(&self) -> NameOrId; + fn column(&self) -> Option; +} + +impl CsvNotetypeExt for CsvNotetype { fn name_or_id(&self) -> NameOrId { match self { Self::GlobalNotetype(nt) => NameOrId::Id(nt.id), @@ -113,7 +98,7 @@ impl CsvNotetype { } /// Column indices for the fields of a notetype. -type FieldSourceColumns = Vec>; +pub(super) type FieldSourceColumns = Vec>; // Column indices are 1-based. 
struct ColumnContext { @@ -244,8 +229,10 @@ fn remove_tags_line_from_reader(reader: &mut (impl Read + Seek)) -> Result<()> { mod test { use std::io::Cursor; + use anki_proto::import_export::csv_metadata::MappedNotetype; + + use super::super::metadata::test::CsvMetadataTestExt; use super::*; - use crate::pb::import_export::csv_metadata::MappedNotetype; macro_rules! import { ($metadata:expr, $csv:expr) => {{ @@ -276,30 +263,6 @@ mod test { }; } - impl CsvMetadata { - fn defaults_for_testing() -> Self { - Self { - delimiter: Delimiter::Comma as i32, - force_delimiter: false, - is_html: false, - force_is_html: false, - tags_column: 0, - guid_column: 0, - global_tags: Vec::new(), - updated_tags: Vec::new(), - column_labels: vec!["".to_string(); 2], - deck: Some(CsvDeck::DeckId(1)), - notetype: Some(CsvNotetype::GlobalNotetype(MappedNotetype { - id: 1, - field_columns: vec![1, 2], - })), - preview: Vec::new(), - dupe_resolution: 0, - match_scope: 0, - } - } - } - #[test] fn should_allow_missing_columns() { let metadata = CsvMetadata::defaults_for_testing(); diff --git a/rslib/src/import_export/text/csv/metadata.rs b/rslib/src/import_export/text/csv/metadata.rs index 13c4ef261..f755e13da 100644 --- a/rslib/src/import_export/text/csv/metadata.rs +++ b/rslib/src/import_export/text/csv/metadata.rs @@ -9,23 +9,23 @@ use std::io::Read; use std::io::Seek; use std::io::SeekFrom; +pub use anki_proto::import_export::csv_metadata::Deck as CsvDeck; +pub use anki_proto::import_export::csv_metadata::Delimiter; +pub use anki_proto::import_export::csv_metadata::DupeResolution; +pub use anki_proto::import_export::csv_metadata::MappedNotetype; +pub use anki_proto::import_export::csv_metadata::MatchScope; +pub use anki_proto::import_export::csv_metadata::Notetype as CsvNotetype; +pub use anki_proto::import_export::CsvMetadata; use itertools::Itertools; use strum::IntoEnumIterator; use super::import::build_csv_reader; use crate::config::I32ConfigKey; +use crate::import_export::text::csv::import::FieldSourceColumns; use crate::import_export::text::NameOrId; use crate::import_export::ImportError; use crate::io::open_file; use crate::notetype::NoteField; -use crate::pb::generic::StringList; -pub use crate::pb::import_export::csv_metadata::Deck as CsvDeck; -pub use crate::pb::import_export::csv_metadata::Delimiter; -pub use crate::pb::import_export::csv_metadata::DupeResolution; -pub use crate::pb::import_export::csv_metadata::MappedNotetype; -pub use crate::pb::import_export::csv_metadata::MatchScope; -pub use crate::pb::import_export::csv_metadata::Notetype as CsvNotetype; -pub use crate::pb::import_export::CsvMetadata; use crate::prelude::*; use crate::text::html_to_text_line; use crate::text::is_html; @@ -135,7 +135,7 @@ impl Collection { } "notetype" => { if let Ok(Some(nt)) = self.notetype_by_name_or_id(&NameOrId::parse(value)) { - metadata.notetype = Some(CsvNotetype::new_global(nt.id)); + metadata.notetype = Some(new_global_csv_notetype(nt.id)); } } "deck" => { @@ -191,13 +191,13 @@ impl Collection { /// we apply the defaults from defaults_for_adding(). 
pub(crate) fn maybe_set_notetype_and_deck( &mut self, - metadata: &mut crate::pb::import_export::CsvMetadata, + metadata: &mut anki_proto::import_export::CsvMetadata, notetype_id: Option, deck_id: Option, ) -> Result<()> { let defaults = self.defaults_for_adding(DeckId(0))?; if metadata.notetype.is_none() || notetype_id.is_some() { - metadata.notetype = Some(CsvNotetype::new_global( + metadata.notetype = Some(new_global_csv_notetype( notetype_id.unwrap_or(defaults.notetype_id), )); } @@ -233,7 +233,15 @@ impl Collection { } } -impl CsvMetadata { +pub(super) trait CsvMetadataHelpers { + fn from_config(col: &Collection) -> Self; + fn deck(&self) -> Result<&CsvDeck>; + fn notetype(&self) -> Result<&CsvNotetype>; + fn field_source_columns(&self) -> Result; + fn meta_columns(&self) -> HashSet; +} + +impl CsvMetadataHelpers for CsvMetadata { /// Defaults with config values filled in. fn from_config(col: &Collection) -> Self { Self { @@ -242,9 +250,56 @@ impl CsvMetadata { ..Default::default() } } + + fn deck(&self) -> Result<&CsvDeck> { + self.deck.as_ref().or_invalid("deck oneof not set") + } + + fn notetype(&self) -> Result<&CsvNotetype> { + self.notetype.as_ref().or_invalid("notetype oneof not set") + } + + fn field_source_columns(&self) -> Result { + Ok(match self.notetype()? { + CsvNotetype::GlobalNotetype(global) => global + .field_columns + .iter() + .map(|&i| (i > 0).then_some(i as usize)) + .collect(), + CsvNotetype::NotetypeColumn(_) => { + let meta_columns = self.meta_columns(); + (1..self.column_labels.len() + 1) + .filter(|idx| !meta_columns.contains(idx)) + .map(Some) + .collect() + } + }) + } + + fn meta_columns(&self) -> HashSet { + let mut columns = HashSet::new(); + if let Some(CsvDeck::DeckColumn(deck_column)) = self.deck { + columns.insert(deck_column as usize); + } + if let Some(CsvNotetype::NotetypeColumn(notetype_column)) = self.notetype { + columns.insert(notetype_column as usize); + } + if self.tags_column > 0 { + columns.insert(self.tags_column as usize); + } + if self.guid_column > 0 { + columns.insert(self.guid_column as usize); + } + columns + } } -impl DupeResolution { +pub(super) trait DupeResolutionExt: Sized { + fn from_config(col: &Collection) -> Self; + fn from_text(text: &str) -> Option; +} + +impl DupeResolutionExt for DupeResolution { fn from_config(col: &Collection) -> Self { Self::from_i32(col.get_config_i32(I32ConfigKey::CsvDuplicateResolution)).unwrap_or_default() } @@ -259,7 +314,12 @@ impl DupeResolution { } } -impl MatchScope { +pub(super) trait MatchScopeExt: Sized { + fn from_config(col: &Collection) -> Self; + fn from_text(text: &str) -> Option; +} + +impl MatchScopeExt for MatchScope { fn from_config(col: &Collection) -> Self { Self::from_i32(col.get_config_i32(I32ConfigKey::MatchScope)).unwrap_or_default() } @@ -308,8 +368,12 @@ fn set_preview(metadata: &mut CsvMetadata, records: &[csv::StringRecord]) -> Res Ok(()) } -fn build_preview_row(min_len: usize, record: &csv::StringRecord, strip_html: bool) -> StringList { - StringList { +fn build_preview_row( + min_len: usize, + record: &csv::StringRecord, + strip_html: bool, +) -> anki_proto::generic::StringList { + anki_proto::generic::StringList { vals: record .iter() .pad_using(min_len, |_| "") @@ -475,8 +539,13 @@ fn strip_line_ending(line: &str) -> &str { .unwrap_or_else(|| line.strip_suffix('\n').unwrap_or(line)) } -impl Delimiter { - pub fn byte(self) -> u8 { +pub(super) trait DelimeterExt { + fn byte(self) -> u8; + fn name(self) -> &'static str; +} + +impl DelimeterExt for Delimiter { + fn 
byte(self) -> u8 { match self { Delimiter::Comma => b',', Delimiter::Semicolon => b';', @@ -487,7 +556,7 @@ impl Delimiter { } } - pub fn name(self) -> &'static str { + fn name(self) -> &'static str { match self { Delimiter::Comma => "comma", Delimiter::Semicolon => "semicolon", @@ -499,32 +568,11 @@ impl Delimiter { } } -impl CsvNotetype { - fn new_global(id: NotetypeId) -> Self { - Self::GlobalNotetype(MappedNotetype { - id: id.0, - field_columns: Vec::new(), - }) - } -} - -impl CsvMetadata { - pub(super) fn meta_columns(&self) -> HashSet { - let mut columns = HashSet::new(); - if let Some(CsvDeck::DeckColumn(deck_column)) = self.deck { - columns.insert(deck_column as usize); - } - if let Some(CsvNotetype::NotetypeColumn(notetype_column)) = self.notetype { - columns.insert(notetype_column as usize); - } - if self.tags_column > 0 { - columns.insert(self.tags_column as usize); - } - if self.guid_column > 0 { - columns.insert(self.guid_column as usize); - } - columns - } +fn new_global_csv_notetype(id: NotetypeId) -> CsvNotetype { + CsvNotetype::GlobalNotetype(MappedNotetype { + id: id.0, + field_columns: Vec::new(), + }) } impl NameOrId { @@ -537,16 +585,8 @@ impl NameOrId { } } -impl From for StringList { - fn from(record: csv::StringRecord) -> Self { - Self { - vals: record.iter().map(ToString::to_string).collect(), - } - } -} - #[cfg(test)] -mod test { +pub(in crate::import_export) mod test { use std::io::Cursor; use super::*; @@ -561,7 +601,36 @@ mod test { }; } - impl CsvMetadata { + pub trait CsvMetadataTestExt { + fn defaults_for_testing() -> Self; + fn unwrap_deck_id(&self) -> i64; + fn unwrap_notetype_id(&self) -> i64; + fn unwrap_notetype_map(&self) -> &[u32]; + } + + impl CsvMetadataTestExt for CsvMetadata { + fn defaults_for_testing() -> Self { + Self { + delimiter: Delimiter::Comma as i32, + force_delimiter: false, + is_html: false, + force_is_html: false, + tags_column: 0, + guid_column: 0, + global_tags: Vec::new(), + updated_tags: Vec::new(), + column_labels: vec!["".to_string(); 2], + deck: Some(CsvDeck::DeckId(1)), + notetype: Some(CsvNotetype::GlobalNotetype(MappedNotetype { + id: 1, + field_columns: vec![1, 2], + })), + preview: Vec::new(), + dupe_resolution: 0, + match_scope: 0, + } + } + fn unwrap_deck_id(&self) -> i64 { match self.deck { Some(CsvDeck::DeckId(did)) => did, @@ -575,6 +644,12 @@ mod test { _ => panic!("no notetype id"), } } + fn unwrap_notetype_map(&self) -> &[u32] { + match &self.notetype { + Some(CsvNotetype::GlobalNotetype(nt)) => &nt.field_columns, + _ => panic!("no notetype map"), + } + } } #[test] @@ -729,15 +804,6 @@ mod test { ); } - impl CsvMetadata { - fn unwrap_notetype_map(&self) -> &[u32] { - match &self.notetype { - Some(CsvNotetype::GlobalNotetype(nt)) => &nt.field_columns, - _ => panic!("no notetype map"), - } - } - } - #[test] fn should_map_default_notetype_fields_by_index_if_no_column_names() { let mut col = Collection::new(); diff --git a/rslib/src/import_export/text/csv/mod.rs b/rslib/src/import_export/text/csv/mod.rs index 8ac4105d3..ba4e309bb 100644 --- a/rslib/src/import_export/text/csv/mod.rs +++ b/rslib/src/import_export/text/csv/mod.rs @@ -3,4 +3,4 @@ mod export; mod import; -mod metadata; +pub mod metadata; diff --git a/rslib/src/import_export/text/import.rs b/rslib/src/import_export/text/import.rs index 78e081266..116f25221 100644 --- a/rslib/src/import_export/text/import.rs +++ b/rslib/src/import_export/text/import.rs @@ -25,7 +25,6 @@ use crate::notes::normalize_field; use crate::notetype::CardGenContext; use 
crate::notetype::CardTemplate; use crate::notetype::NoteField; -use crate::notetype::NotetypeConfig; use crate::prelude::*; use crate::text::strip_html_preserving_media_filenames; @@ -60,13 +59,11 @@ impl ForeignData { } } -impl NoteLog { - fn new(dupe_resolution: DupeResolution, found_notes: u32) -> Self { - Self { - dupe_resolution: dupe_resolution as i32, - found_notes, - ..Default::default() - } +fn new_note_log(dupe_resolution: DupeResolution, found_notes: u32) -> NoteLog { + NoteLog { + dupe_resolution: dupe_resolution as i32, + found_notes, + ..Default::default() } } @@ -235,7 +232,7 @@ impl<'a> Context<'a> { progress: &mut IncrementableProgress, ) -> Result { let mut incrementor = progress.incrementor(ImportProgress::Notes); - let mut log = NoteLog::new(self.dupe_resolution, notes.len() as u32); + let mut log = new_note_log(self.dupe_resolution, notes.len() as u32); for foreign in notes { incrementor.increment()?; if foreign.first_field_is_the_empty_string() { @@ -612,9 +609,9 @@ impl ForeignNotetype { .map(ForeignTemplate::into_native) .collect(), config: if self.is_cloze { - NotetypeConfig::new_cloze() + Notetype::new_cloze_config() } else { - NotetypeConfig::new() + Notetype::new_config() }, ..Notetype::default() } diff --git a/rslib/src/import_export/text/mod.rs b/rslib/src/import_export/text/mod.rs index dc64903ed..4d799909f 100644 --- a/rslib/src/import_export/text/mod.rs +++ b/rslib/src/import_export/text/mod.rs @@ -5,12 +5,12 @@ pub mod csv; mod import; mod json; -use serde_derive::Deserialize; -use serde_derive::Serialize; +use anki_proto::import_export::csv_metadata::DupeResolution; +use anki_proto::import_export::csv_metadata::MatchScope; +use serde::Deserialize; +use serde::Serialize; use super::LogNote; -use crate::pb::import_export::csv_metadata::DupeResolution; -use crate::pb::import_export::csv_metadata::MatchScope; #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(default)] diff --git a/rslib/src/lib.rs b/rslib/src/lib.rs index eefeb01c6..1cb6e9ccd 100644 --- a/rslib/src/lib.rs +++ b/rslib/src/lib.rs @@ -16,7 +16,6 @@ pub mod deckconfig; pub mod decks; pub mod error; pub mod findreplace; -pub mod i18n; pub mod image_occlusion; pub mod import_export; mod io; @@ -28,7 +27,6 @@ pub mod media; pub mod notes; pub mod notetype; pub mod ops; -pub mod pb; mod preferences; pub mod prelude; pub mod revlog; @@ -56,3 +54,5 @@ use lazy_static::lazy_static; lazy_static! 
{ pub(crate) static ref PYTHON_UNIT_TESTS: bool = env::var("ANKI_TEST_MODE").is_ok(); } + +// temporary during proto migration diff --git a/rslib/src/links.rs b/rslib/src/links.rs index f4e2d1cd6..ac4df5ab6 100644 --- a/rslib/src/links.rs +++ b/rslib/src/links.rs @@ -1,46 +1,42 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -pub use crate::pb::links::help_page_link_request::HelpPage; +pub use anki_proto::links::help_page_link_request::HelpPage; static HELP_SITE: &str = "https://docs.ankiweb.net/"; -impl HelpPage { - pub fn to_link(self) -> String { - format!("{}{}", HELP_SITE, self.to_link_suffix()) - } +pub fn help_page_to_link(page: HelpPage) -> String { + format!("{}{}", HELP_SITE, help_page_link_suffix(page)) +} - pub fn to_link_suffix(self) -> &'static str { - match self { - HelpPage::NoteType => "getting-started.html#note-types", - HelpPage::Browsing => "browsing.html", - HelpPage::BrowsingFindAndReplace => "browsing.html#find-and-replace", - HelpPage::BrowsingNotesMenu => "browsing.html#notes", - HelpPage::KeyboardShortcuts => "studying.html#keyboard-shortcuts", - HelpPage::Editing => "editing.html", - HelpPage::AddingCardAndNote => "editing.html#adding-cards-and-notes", - HelpPage::AddingANoteType => "editing.html#adding-a-note-type", - HelpPage::Latex => "math.html#latex", - HelpPage::Preferences => "preferences.html", - HelpPage::Index => "", - HelpPage::Templates => "templates/intro.html", - HelpPage::FilteredDeck => "filtered-decks.html", - HelpPage::Importing => "importing.html", - HelpPage::CustomizingFields => "editing.html#customizing-fields", - HelpPage::DeckOptions => "deck-options.html", - HelpPage::EditingFeatures => "editing.html#editing-features", - HelpPage::FullScreenIssue => "platform/windows/display-issues.html#full-screen", - HelpPage::CardTypeTemplateError => "templates/errors.html#template-syntax-error", - HelpPage::CardTypeDuplicate => "templates/errors.html#identical-front-sides", - HelpPage::CardTypeNoFrontField => { - "templates/errors.html#no-field-replacement-on-front-side" - } - HelpPage::CardTypeMissingCloze => { - "templates/errors.html#no-cloze-filter-on-cloze-notetype" - } - HelpPage::CardTypeExtraneousCloze => { - "templates/errors.html#cloze-filter-outside-cloze-notetype" - } +pub fn help_page_link_suffix(page: HelpPage) -> &'static str { + match page { + HelpPage::NoteType => "getting-started.html#note-types", + HelpPage::Browsing => "browsing.html", + HelpPage::BrowsingFindAndReplace => "browsing.html#find-and-replace", + HelpPage::BrowsingNotesMenu => "browsing.html#notes", + HelpPage::KeyboardShortcuts => "studying.html#keyboard-shortcuts", + HelpPage::Editing => "editing.html", + HelpPage::AddingCardAndNote => "editing.html#adding-cards-and-notes", + HelpPage::AddingANoteType => "editing.html#adding-a-note-type", + HelpPage::Latex => "math.html#latex", + HelpPage::Preferences => "preferences.html", + HelpPage::Index => "", + HelpPage::Templates => "templates/intro.html", + HelpPage::FilteredDeck => "filtered-decks.html", + HelpPage::Importing => "importing.html", + HelpPage::CustomizingFields => "editing.html#customizing-fields", + HelpPage::DeckOptions => "deck-options.html", + HelpPage::EditingFeatures => "editing.html#editing-features", + HelpPage::FullScreenIssue => "platform/windows/display-issues.html#full-screen", + HelpPage::CardTypeTemplateError => "templates/errors.html#template-syntax-error", + HelpPage::CardTypeDuplicate => 
"templates/errors.html#identical-front-sides", + HelpPage::CardTypeNoFrontField => { + "templates/errors.html#no-field-replacement-on-front-side" + } + HelpPage::CardTypeMissingCloze => "templates/errors.html#no-cloze-filter-on-cloze-notetype", + HelpPage::CardTypeExtraneousCloze => { + "templates/errors.html#cloze-filter-outside-cloze-notetype" } } } diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs index 699ceff96..d9299a685 100644 --- a/rslib/src/notes/mod.rs +++ b/rslib/src/notes/mod.rs @@ -7,6 +7,7 @@ use std::borrow::Cow; use std::collections::HashMap; use std::collections::HashSet; +use anki_proto::notes::note_fields_check_response::State as NoteFieldsState; use itertools::Itertools; use num_integer::Integer; use sha1::Digest; @@ -17,8 +18,6 @@ use crate::define_newtype; use crate::notetype::CardGenContext; use crate::notetype::NoteField; use crate::ops::StateChanges; -use crate::pb; -use crate::pb::notes::note_fields_check_response::State as NoteFieldsState; use crate::prelude::*; use crate::template::field_is_empty; use crate::text::ensure_string_in_nfc; @@ -259,9 +258,9 @@ pub(crate) fn normalize_field(field: &mut String, normalize_text: bool) { } } -impl From for pb::notes::Note { +impl From for anki_proto::notes::Note { fn from(n: Note) -> Self { - pb::notes::Note { + anki_proto::notes::Note { id: n.id.0, guid: n.guid, notetype_id: n.notetype_id.0, @@ -273,8 +272,8 @@ impl From for pb::notes::Note { } } -impl From for Note { - fn from(n: pb::notes::Note) -> Self { +impl From for Note { + fn from(n: anki_proto::notes::Note) -> Self { Note { id: NoteId(n.id), guid: n.guid, diff --git a/rslib/src/notetype/fields.rs b/rslib/src/notetype/fields.rs index 7b731ae58..ec1a4c875 100644 --- a/rslib/src/notetype/fields.rs +++ b/rslib/src/notetype/fields.rs @@ -3,7 +3,6 @@ use super::NoteFieldConfig; use super::NoteFieldProto; -use crate::pb::generic::UInt32; use crate::prelude::*; #[derive(Debug, PartialEq, Clone)] @@ -16,7 +15,7 @@ pub struct NoteField { impl From for NoteFieldProto { fn from(f: NoteField) -> Self { NoteFieldProto { - ord: f.ord.map(|n| UInt32 { val: n }), + ord: f.ord.map(Into::into), name: f.name, config: Some(f.config), } diff --git a/rslib/src/notetype/mod.rs b/rslib/src/notetype/mod.rs index 5034e5f7b..b11dd8485 100644 --- a/rslib/src/notetype/mod.rs +++ b/rslib/src/notetype/mod.rs @@ -19,6 +19,15 @@ use std::collections::HashSet; use std::iter::FromIterator; use std::sync::Arc; +pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind; +pub use anki_proto::notetypes::notetype::config::CardRequirement; +pub use anki_proto::notetypes::notetype::config::Kind as NotetypeKind; +pub use anki_proto::notetypes::notetype::field::Config as NoteFieldConfig; +pub use anki_proto::notetypes::notetype::template::Config as CardTemplateConfig; +pub use anki_proto::notetypes::notetype::Config as NotetypeConfig; +pub use anki_proto::notetypes::notetype::Field as NoteFieldProto; +pub use anki_proto::notetypes::notetype::Template as CardTemplateProto; +pub use anki_proto::notetypes::Notetype as NotetypeProto; pub(crate) use cardgen::AlreadyGeneratedCardInfo; pub(crate) use cardgen::CardGenContext; pub use fields::NoteField; @@ -39,15 +48,6 @@ use crate::error::CardTypeError; use crate::error::CardTypeErrorDetails; use crate::error::CardTypeSnafu; use crate::error::MissingClozeSnafu; -pub use crate::pb::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind; -pub use 
crate::pb::notetypes::notetype::config::CardRequirement; -pub use crate::pb::notetypes::notetype::config::Kind as NotetypeKind; -pub use crate::pb::notetypes::notetype::field::Config as NoteFieldConfig; -pub use crate::pb::notetypes::notetype::template::Config as CardTemplateConfig; -pub use crate::pb::notetypes::notetype::Config as NotetypeConfig; -pub use crate::pb::notetypes::notetype::Field as NoteFieldProto; -pub use crate::pb::notetypes::notetype::Template as CardTemplateProto; -pub use crate::pb::notetypes::Notetype as NotetypeProto; use crate::prelude::*; use crate::search::JoinSearches; use crate::search::Node; @@ -96,13 +96,13 @@ impl Default for Notetype { usn: Usn(0), fields: vec![], templates: vec![], - config: NotetypeConfig::new(), + config: Notetype::new_config(), } } } -impl NotetypeConfig { - pub(crate) fn new() -> Self { +impl Notetype { + pub(crate) fn new_config() -> NotetypeConfig { NotetypeConfig { css: DEFAULT_CSS.into(), latex_pre: DEFAULT_LATEX_HEADER.into(), @@ -111,8 +111,8 @@ impl NotetypeConfig { } } - pub(crate) fn new_cloze() -> Self { - let mut config = Self::new(); + pub(crate) fn new_cloze_config() -> NotetypeConfig { + let mut config = Self::new_config(); config.css += DEFAULT_CLOZE_CSS; config.kind = NotetypeKind::Cloze as i32; config diff --git a/rslib/src/notetype/restore.rs b/rslib/src/notetype/restore.rs index 25234c5d5..92ca2777f 100644 --- a/rslib/src/notetype/restore.rs +++ b/rslib/src/notetype/restore.rs @@ -1,10 +1,11 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::notetypes::stock_notetype::Kind; +use anki_proto::notetypes::stock_notetype::OriginalStockKind; + use crate::notetype::stock::get_original_stock_notetype; use crate::notetype::stock::StockKind; -use crate::pb::notetypes::stock_notetype::Kind; -use crate::pb::notetypes::stock_notetype::OriginalStockKind; use crate::prelude::*; impl Collection { diff --git a/rslib/src/notetype/schema11.rs b/rslib/src/notetype/schema11.rs index ef8e68c4c..ed19b677f 100644 --- a/rslib/src/notetype/schema11.rs +++ b/rslib/src/notetype/schema11.rs @@ -3,8 +3,8 @@ use std::collections::HashMap; -use serde_derive::Deserialize; -use serde_derive::Serialize; +use serde::Deserialize; +use serde::Serialize; use serde_json::Value; use serde_repr::Deserialize_repr; use serde_repr::Serialize_repr; diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs index bb69fe927..ba62735cc 100644 --- a/rslib/src/notetype/stock.rs +++ b/rslib/src/notetype/stock.rs @@ -1,18 +1,19 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_i18n::I18n; +use anki_proto::notetypes::notetype::config::Kind as NotetypeKind; +use anki_proto::notetypes::stock_notetype::Kind; +pub(crate) use anki_proto::notetypes::stock_notetype::Kind as StockKind; +use anki_proto::notetypes::stock_notetype::OriginalStockKind; + use super::NotetypeConfig; use crate::config::ConfigEntry; use crate::config::ConfigKey; use crate::error::Result; -use crate::i18n::I18n; use crate::image_occlusion::notetype::image_occlusion_notetype; use crate::invalid_input; use crate::notetype::Notetype; -use crate::pb::notetypes::notetype::config::Kind as NotetypeKind; -use crate::pb::notetypes::stock_notetype::Kind; -pub(crate) use crate::pb::notetypes::stock_notetype::Kind as StockKind; -use crate::pb::notetypes::stock_notetype::OriginalStockKind; use 
crate::storage::SqliteStorage; use crate::timestamp::TimestampSecs; @@ -63,9 +64,9 @@ pub(crate) fn empty_stock( kind: nt_kind as i32, original_stock_kind: original_stock_kind as i32, ..if nt_kind == NotetypeKind::Cloze { - NotetypeConfig::new_cloze() + Notetype::new_cloze_config() } else { - NotetypeConfig::new() + Notetype::new_config() } }, ..Default::default() diff --git a/rslib/src/notetype/templates.rs b/rslib/src/notetype/templates.rs index 894dbe3df..b75fbb8e0 100644 --- a/rslib/src/notetype/templates.rs +++ b/rslib/src/notetype/templates.rs @@ -3,7 +3,6 @@ use super::CardTemplateConfig; use super::CardTemplateProto; -use crate::pb::generic::UInt32; use crate::prelude::*; use crate::template::ParsedTemplate; @@ -53,7 +52,7 @@ impl CardTemplate { impl From for CardTemplateProto { fn from(t: CardTemplate) -> Self { CardTemplateProto { - ord: t.ord.map(|n| UInt32 { val: n }), + ord: t.ord.map(Into::into), mtime_secs: t.mtime_secs.0, usn: t.usn.0, name: t.name, diff --git a/rslib/src/pb.rs b/rslib/src/pb.rs deleted file mode 100644 index ce84b5e9d..000000000 --- a/rslib/src/pb.rs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -macro_rules! protobuf { - ($ident:ident, $name:literal) => { - pub mod $ident { - include!(concat!("../../out/rslib/proto/anki.", $name, ".rs")); - } - }; -} - -include!("../../out/rslib/proto/service_index.rs"); - -protobuf!(ankidroid, "ankidroid"); -protobuf!(backend, "backend"); -protobuf!(card_rendering, "card_rendering"); -protobuf!(cards, "cards"); -protobuf!(collection, "collection"); -protobuf!(config, "config"); -protobuf!(deckconfig, "deckconfig"); -protobuf!(decks, "decks"); -protobuf!(generic, "generic"); -protobuf!(i18n, "i18n"); -protobuf!(image_occlusion, "image_occlusion"); -protobuf!(import_export, "import_export"); -protobuf!(links, "links"); -protobuf!(media, "media"); -protobuf!(notes, "notes"); -protobuf!(notetypes, "notetypes"); -protobuf!(scheduler, "scheduler"); -protobuf!(search, "search"); -protobuf!(stats, "stats"); -protobuf!(sync, "sync"); -protobuf!(tags, "tags"); diff --git a/rslib/src/preferences.rs b/rslib/src/preferences.rs index 01ecb5769..4f7d7ba6e 100644 --- a/rslib/src/preferences.rs +++ b/rslib/src/preferences.rs @@ -1,15 +1,16 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::config::preferences::scheduling::NewReviewMix as NewRevMixPB; +use anki_proto::config::preferences::Editing; +use anki_proto::config::preferences::Reviewing; +use anki_proto::config::preferences::Scheduling; +use anki_proto::config::Preferences; + use crate::collection::Collection; use crate::config::BoolKey; use crate::config::StringKey; use crate::error::Result; -use crate::pb::config::preferences::scheduling::NewReviewMix as NewRevMixPB; -use crate::pb::config::preferences::Editing; -use crate::pb::config::preferences::Reviewing; -use crate::pb::config::preferences::Scheduling; -use crate::pb::config::Preferences; use crate::prelude::*; use crate::scheduler::timing::local_minutes_west_for_stamp; diff --git a/rslib/src/prelude.rs b/rslib/src/prelude.rs index 3157e3214..ee6255f7f 100644 --- a/rslib/src/prelude.rs +++ b/rslib/src/prelude.rs @@ -1,6 +1,7 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub use anki_i18n::I18n; pub use snafu::ResultExt; pub 
use crate::card::Card; @@ -17,7 +18,6 @@ pub use crate::error::AnkiError; pub use crate::error::OrInvalid; pub use crate::error::OrNotFound; pub use crate::error::Result; -pub use crate::i18n::I18n; pub use crate::invalid_input; pub use crate::media::Sha1Hash; pub use crate::notes::Note; diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index 4fa9ad268..5c868e126 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -25,7 +25,6 @@ use crate::card::CardQueue; use crate::deckconfig::DeckConfig; use crate::deckconfig::LeechAction; use crate::decks::Deck; -use crate::pb; use crate::prelude::*; #[derive(Copy, Clone)] @@ -327,7 +326,7 @@ impl Collection { self.update_deck_stats( updater.timing.days_elapsed, usn, - pb::scheduler::UpdateStatsRequest { + anki_proto::scheduler::UpdateStatsRequest { deck_id: updater.deck.id.0, new_delta, review_delta, diff --git a/rslib/src/scheduler/bury_and_suspend.rs b/rslib/src/scheduler/bury_and_suspend.rs index 5a5825262..1b1c55934 100644 --- a/rslib/src/scheduler/bury_and_suspend.rs +++ b/rslib/src/scheduler/bury_and_suspend.rs @@ -1,12 +1,13 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::scheduler::bury_or_suspend_cards_request::Mode as BuryOrSuspendMode; +use anki_proto::scheduler::unbury_deck_request::Mode as UnburyDeckMode; + use super::queue::BuryMode; use super::timing::SchedTimingToday; use crate::card::CardQueue; use crate::config::SchedulerVersion; -use crate::pb::scheduler::bury_or_suspend_cards_request::Mode as BuryOrSuspendMode; -use crate::pb::scheduler::unbury_deck_request::Mode as UnburyDeckMode; use crate::prelude::*; use crate::search::JoinSearches; use crate::search::SearchNode; diff --git a/rslib/src/scheduler/congrats.rs b/rslib/src/scheduler/congrats.rs index 02e6651a7..721bd417b 100644 --- a/rslib/src/scheduler/congrats.rs +++ b/rslib/src/scheduler/congrats.rs @@ -1,7 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use crate::pb; use crate::prelude::*; #[derive(Debug)] @@ -15,7 +14,7 @@ pub(crate) struct CongratsInfo { } impl Collection { - pub fn congrats_info(&mut self) -> Result { + pub fn congrats_info(&mut self) -> Result { let deck = self.get_current_deck()?; let today = self.timing_today()?.days_elapsed; let info = self.storage.congrats_info(&deck, today)?; @@ -28,7 +27,7 @@ impl Collection { ((info.next_learn_due as i64) - self.learn_ahead_secs() as i64 - TimestampSecs::now().0) .max(60) as u32 }; - Ok(pb::scheduler::CongratsInfoResponse { + Ok(anki_proto::scheduler::CongratsInfoResponse { learn_remaining: info.learn_count, review_remaining: info.review_remaining, new_remaining: info.new_remaining, @@ -52,7 +51,7 @@ mod test { let info = col.congrats_info().unwrap(); assert_eq!( info, - crate::pb::scheduler::CongratsInfoResponse { + anki_proto::scheduler::CongratsInfoResponse { learn_remaining: 0, review_remaining: false, new_remaining: false, diff --git a/rslib/src/scheduler/filtered/custom_study.rs b/rslib/src/scheduler/filtered/custom_study.rs index afae1980d..32a32950d 100644 --- a/rslib/src/scheduler/filtered/custom_study.rs +++ b/rslib/src/scheduler/filtered/custom_study.rs @@ -3,17 +3,19 @@ use std::collections::HashSet; +use anki_proto::scheduler::custom_study_request::cram::CramKind; +use anki_proto::scheduler::custom_study_request::Cram; +use 
anki_proto::scheduler::custom_study_request::Value as CustomStudyValue; + use super::FilteredDeckForUpdate; use crate::config::DeckConfigKey; +use crate::decks::tree::get_deck_in_tree; +use crate::decks::tree::sum_deck_tree_node; use crate::decks::FilteredDeck; use crate::decks::FilteredSearchOrder; use crate::decks::FilteredSearchTerm; use crate::error::CustomStudyError; use crate::error::FilteredDeckError; -use crate::pb; -use crate::pb::scheduler::custom_study_request::cram::CramKind; -use crate::pb::scheduler::custom_study_request::Cram; -use crate::pb::scheduler::custom_study_request::Value as CustomStudyValue; use crate::prelude::*; use crate::search::JoinSearches; use crate::search::Negated; @@ -25,7 +27,7 @@ use crate::search::StateKind; impl Collection { pub fn custom_study( &mut self, - input: pb::scheduler::CustomStudyRequest, + input: anki_proto::scheduler::CustomStudyRequest, ) -> Result> { self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input)) } @@ -33,19 +35,20 @@ impl Collection { pub fn custom_study_defaults( &mut self, deck_id: DeckId, - ) -> Result { + ) -> Result { // daily counts let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?; let normal = deck.normal()?; let extend_new = normal.extend_new; let extend_review = normal.extend_review; - let subtree = self - .deck_tree(Some(TimestampSecs::now()))? - .get_deck(deck_id) + + let subtree = get_deck_in_tree(self.deck_tree(Some(TimestampSecs::now()))?, deck_id) .or_not_found(deck_id)?; let v3 = self.get_config_bool(BoolKey::Sched2021); - let available_new_including_children = subtree.sum(|node| node.new_uncapped); - let available_review_including_children = subtree.sum(|node| node.review_uncapped); + let available_new_including_children = + sum_deck_tree_node(&subtree, |node| node.new_uncapped); + let available_review_including_children = + sum_deck_tree_node(&subtree, |node| node.review_uncapped); let ( available_new, available_new_in_children, @@ -79,11 +82,11 @@ impl Collection { ); let mut all_tags: Vec<_> = self.all_tags_in_deck(deck_id)?.into_iter().collect(); all_tags.sort_unstable(); - let tags: Vec = all_tags + let tags: Vec = all_tags .into_iter() .map(|tag| { let tag = tag.into_inner(); - pb::scheduler::custom_study_defaults_response::Tag { + anki_proto::scheduler::custom_study_defaults_response::Tag { include: include_tags.contains(&tag), exclude: exclude_tags.contains(&tag), name: tag, @@ -91,7 +94,7 @@ impl Collection { }) .collect(); - Ok(pb::scheduler::CustomStudyDefaultsResponse { + Ok(anki_proto::scheduler::CustomStudyDefaultsResponse { tags, extend_new, extend_review, @@ -104,7 +107,10 @@ impl Collection { } impl Collection { - fn custom_study_inner(&mut self, input: pb::scheduler::CustomStudyRequest) -> Result<()> { + fn custom_study_inner( + &mut self, + input: anki_proto::scheduler::CustomStudyRequest, + ) -> Result<()> { let mut deck = self .storage .get_deck(input.deck_id.into())? 
@@ -298,11 +304,12 @@ fn tags_to_nodes(tags_to_include: &[String], tags_to_exclude: &[String]) -> Sear
 
 #[cfg(test)]
 mod test {
+    use anki_proto::scheduler::custom_study_request::cram::CramKind;
+    use anki_proto::scheduler::custom_study_request::Cram;
+    use anki_proto::scheduler::custom_study_request::Value;
+    use anki_proto::scheduler::CustomStudyRequest;
+
     use super::*;
-    use crate::pb::scheduler::custom_study_request::cram::CramKind;
-    use crate::pb::scheduler::custom_study_request::Cram;
-    use crate::pb::scheduler::custom_study_request::Value;
-    use crate::pb::scheduler::CustomStudyRequest;
 
     #[test]
     fn tag_remembering() -> Result<()> {
diff --git a/rslib/src/scheduler/new.rs b/rslib/src/scheduler/new.rs
index afd0b6b03..810ad9c31 100644
--- a/rslib/src/scheduler/new.rs
+++ b/rslib/src/scheduler/new.rs
@@ -4,6 +4,9 @@
 use std::collections::HashMap;
 use std::collections::HashSet;
 
+pub use anki_proto::scheduler::schedule_cards_as_new_request::Context as ScheduleAsNewContext;
+pub use anki_proto::scheduler::RepositionDefaultsResponse;
+pub use anki_proto::scheduler::ScheduleCardsAsNewDefaultsResponse;
 use rand::seq::SliceRandom;
 
 use crate::card::CardQueue;
@@ -11,9 +14,6 @@ use crate::card::CardType;
 use crate::config::BoolKey;
 use crate::config::SchedulerVersion;
 use crate::deckconfig::NewCardInsertOrder;
-pub use crate::pb::scheduler::schedule_cards_as_new_request::Context as ScheduleAsNewContext;
-pub use crate::pb::scheduler::RepositionDefaultsResponse;
-pub use crate::pb::scheduler::ScheduleCardsAsNewDefaultsResponse;
 use crate::prelude::*;
 use crate::search::JoinSearches;
 use crate::search::SearchNode;
diff --git a/rslib/src/scheduler/queue/builder/mod.rs b/rslib/src/scheduler/queue/builder/mod.rs
index 68a3a9e50..3363325e1 100644
--- a/rslib/src/scheduler/queue/builder/mod.rs
+++ b/rslib/src/scheduler/queue/builder/mod.rs
@@ -267,11 +267,12 @@ impl Collection {
 
 #[cfg(test)]
 mod test {
+    use anki_proto::deckconfig::deck_config::config::NewCardGatherPriority;
+    use anki_proto::deckconfig::deck_config::config::NewCardSortOrder;
+
     use super::*;
     use crate::card::CardQueue;
     use crate::card::CardType;
-    use crate::pb::deckconfig::deck_config::config::NewCardGatherPriority;
-    use crate::pb::deckconfig::deck_config::config::NewCardSortOrder;
 
     impl Collection {
         fn set_deck_gather_order(&mut self, deck: &mut Deck, order: NewCardGatherPriority) {
diff --git a/rslib/src/scheduler/queue/mod.rs b/rslib/src/scheduler/queue/mod.rs
index 45621900a..ff19aac72 100644
--- a/rslib/src/scheduler/queue/mod.rs
+++ b/rslib/src/scheduler/queue/mod.rs
@@ -9,6 +9,7 @@ pub(crate) mod undo;
 
 use std::collections::VecDeque;
 
+use anki_proto::scheduler::SchedulingContext;
 pub(crate) use builder::DueCard;
 pub(crate) use builder::DueCardKind;
 pub(crate) use builder::NewCard;
@@ -21,7 +22,6 @@ pub(crate) use main::MainQueueEntryKind;
 use self::undo::QueueUpdate;
 use super::states::SchedulingStates;
 use super::timing::SchedTimingToday;
-use crate::pb::scheduler::SchedulingContext;
 use crate::prelude::*;
 use crate::timestamp::TimestampSecs;
 
@@ -118,7 +118,7 @@ impl Collection {
         let next_states = self.get_scheduling_states(card.id)?;
 
         Ok(QueuedCard {
-            context: SchedulingContext::new(self, &card)?,
+            context: new_scheduling_context(self, &card)?,
             card,
             states: next_states,
             kind: entry.kind(),
@@ -134,16 +134,14 @@ impl Collection {
     }
 }
 
-impl SchedulingContext {
-    fn new(col: &mut Collection, card: &Card) -> Result {
-        Ok(Self {
-            deck_name: col
-                .get_deck(card.original_or_current_deck_id())?
-                .or_not_found(card.deck_id)?
-                .human_name(),
-            seed: card.review_seed(),
-        })
-    }
+fn new_scheduling_context(col: &mut Collection, card: &Card) -> Result<SchedulingContext> {
+    Ok(SchedulingContext {
+        deck_name: col
+            .get_deck(card.original_or_current_deck_id())?
+            .or_not_found(card.deck_id)?
+            .human_name(),
+        seed: card.review_seed(),
+    })
 }
 
 impl CardQueues {
diff --git a/rslib/src/scheduler/timespan.rs b/rslib/src/scheduler/timespan.rs
index 3e9ba2144..f98c1ce69 100644
--- a/rslib/src/scheduler/timespan.rs
+++ b/rslib/src/scheduler/timespan.rs
@@ -1,7 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::i18n::I18n;
+use anki_i18n::I18n;
 
 /// Short string like '4d' to place above answer buttons.
 pub fn answer_button_time(seconds: f32, tr: &I18n) -> String {
@@ -166,7 +166,8 @@ impl Timespan {
 
 #[cfg(test)]
 mod test {
-    use crate::i18n::I18n;
+    use anki_i18n::I18n;
+
     use crate::scheduler::timespan::answer_button_time;
     use crate::scheduler::timespan::time_span;
     use crate::scheduler::timespan::MONTH;
diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs
index 4a2f385c0..d058c2233 100644
--- a/rslib/src/stats/card.rs
+++ b/rslib/src/stats/card.rs
@@ -3,12 +3,11 @@
 
 use crate::card::CardQueue;
 use crate::card::CardType;
-use crate::pb;
 use crate::prelude::*;
 use crate::revlog::RevlogEntry;
 
 impl Collection {
-    pub fn card_stats(&mut self, cid: CardId) -> Result<pb::stats::CardStatsResponse> {
+    pub fn card_stats(&mut self, cid: CardId) -> Result<anki_proto::stats::CardStatsResponse> {
         let card = self.storage.get_card(cid)?.or_not_found(cid)?;
         let note = self
             .storage
@@ -26,7 +25,7 @@ impl Collection {
         let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
         let (due_date, due_position) = self.due_date_and_position(&card)?;
 
-        Ok(pb::stats::CardStatsResponse {
+        Ok(anki_proto::stats::CardStatsResponse {
             card_id: card.id.into(),
             note_id: card.note_id.into(),
             deck: deck.human_name(),
@@ -92,8 +91,10 @@ fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
     }
 }
 
-fn stats_revlog_entry(entry: &RevlogEntry) -> pb::stats::card_stats_response::StatsRevlogEntry {
-    pb::stats::card_stats_response::StatsRevlogEntry {
+fn stats_revlog_entry(
+    entry: &RevlogEntry,
+) -> anki_proto::stats::card_stats_response::StatsRevlogEntry {
+    anki_proto::stats::card_stats_response::StatsRevlogEntry {
         time: entry.id.as_secs().0,
         review_kind: entry.review_kind.into(),
         button_chosen: entry.button_chosen as u32,
diff --git a/rslib/src/stats/graphs/added.rs b/rslib/src/stats/graphs/added.rs
index 95be760cf..3b288fd5c 100644
--- a/rslib/src/stats/graphs/added.rs
+++ b/rslib/src/stats/graphs/added.rs
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::Added;
+
 use super::GraphsContext;
-use crate::pb::stats::graphs_response::Added;
 
 impl GraphsContext {
     pub(super) fn added_days(&self) -> Added {
diff --git a/rslib/src/stats/graphs/buttons.rs b/rslib/src/stats/graphs/buttons.rs
index c872f19ef..aedd75d7f 100644
--- a/rslib/src/stats/graphs/buttons.rs
+++ b/rslib/src/stats/graphs/buttons.rs
@@ -1,9 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::buttons::ButtonCounts;
+use anki_proto::stats::graphs_response::Buttons;
+
 use super::GraphsContext;
-use crate::pb::stats::graphs_response::buttons::ButtonCounts;
-use crate::pb::stats::graphs_response::Buttons;
 use crate::revlog::RevlogEntry;
 use crate::revlog::RevlogReviewKind;
 
@@ -32,12 +33,12 @@ impl GraphsContext {
             let Some(interval_bucket) = interval_bucket(review) else { continue; };
             let Some(button_idx) = button_index(review.button_chosen) else { continue; };
             let review_secs = review.id.as_secs();
-            all_time.increment(interval_bucket, button_idx);
+            increment_button_counts(&mut all_time, interval_bucket, button_idx);
             for (stamp, bucket) in &mut conditional_buckets {
                 if &review_secs < stamp {
                     continue 'outer;
                 }
-                bucket.increment(interval_bucket, button_idx);
+                increment_button_counts(bucket, interval_bucket, button_idx);
             }
         }
         Buttons {
@@ -56,13 +57,11 @@ enum IntervalBucket {
     Mature,
 }
 
-impl ButtonCounts {
-    fn increment(&mut self, bucket: IntervalBucket, button_idx: usize) {
-        match bucket {
-            IntervalBucket::Learning => self.learning[button_idx] += 1,
-            IntervalBucket::Young => self.young[button_idx] += 1,
-            IntervalBucket::Mature => self.mature[button_idx] += 1,
-        }
+fn increment_button_counts(counts: &mut ButtonCounts, bucket: IntervalBucket, button_idx: usize) {
+    match bucket {
+        IntervalBucket::Learning => counts.learning[button_idx] += 1,
+        IntervalBucket::Young => counts.young[button_idx] += 1,
+        IntervalBucket::Mature => counts.mature[button_idx] += 1,
     }
 }
 
diff --git a/rslib/src/stats/graphs/card_counts.rs b/rslib/src/stats/graphs/card_counts.rs
index 84f5c931a..0855d5776 100644
--- a/rslib/src/stats/graphs/card_counts.rs
+++ b/rslib/src/stats/graphs/card_counts.rs
@@ -1,11 +1,12 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::card_counts::Counts;
+use anki_proto::stats::graphs_response::CardCounts;
+
 use crate::card::Card;
 use crate::card::CardQueue;
 use crate::card::CardType;
-use crate::pb::stats::graphs_response::card_counts::Counts;
-use crate::pb::stats::graphs_response::CardCounts;
 use crate::stats::graphs::GraphsContext;
 
 impl GraphsContext {
@@ -20,9 +21,9 @@ impl GraphsContext {
                 CardQueue::SchedBuried | CardQueue::UserBuried => {
                     excluding_inactive.buried += 1;
                 }
-                _ => excluding_inactive.increment(card),
+                _ => increment_counts(&mut excluding_inactive, card),
             };
-            including_inactive.increment(card);
+            increment_counts(&mut including_inactive, card);
         }
         CardCounts {
             excluding_inactive: Some(excluding_inactive),
@@ -31,25 +32,23 @@ impl GraphsContext {
     }
 }
 
-impl Counts {
-    fn increment(&mut self, card: &Card) {
-        match card.ctype {
-            CardType::New => {
-                self.new_cards += 1;
-            }
-            CardType::Learn => {
-                self.learn += 1;
-            }
-            CardType::Review => {
-                if card.interval < 21 {
-                    self.young += 1;
-                } else {
-                    self.mature += 1;
-                }
-            }
-            CardType::Relearn => {
-                self.relearn += 1;
+fn increment_counts(counts: &mut Counts, card: &Card) {
+    match card.ctype {
+        CardType::New => {
+            counts.new_cards += 1;
+        }
+        CardType::Learn => {
+            counts.learn += 1;
+        }
+        CardType::Review => {
+            if card.interval < 21 {
+                counts.young += 1;
+            } else {
+                counts.mature += 1;
             }
         }
+        CardType::Relearn => {
+            counts.relearn += 1;
+        }
     }
 }
diff --git a/rslib/src/stats/graphs/eases.rs b/rslib/src/stats/graphs/eases.rs
index 22453fe98..f4040e703 100644
--- a/rslib/src/stats/graphs/eases.rs
+++ b/rslib/src/stats/graphs/eases.rs
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::Eases;
+
 use crate::card::CardType;
-use crate::pb::stats::graphs_response::Eases;
 use crate::stats::graphs::GraphsContext;
 
 impl GraphsContext {
diff --git a/rslib/src/stats/graphs/future_due.rs b/rslib/src/stats/graphs/future_due.rs
index 4b152448e..ff92cd366 100644
--- a/rslib/src/stats/graphs/future_due.rs
+++ b/rslib/src/stats/graphs/future_due.rs
@@ -3,8 +3,9 @@
 
 use std::collections::HashMap;
 
+use anki_proto::stats::graphs_response::FutureDue;
+
 use super::GraphsContext;
-use crate::pb::stats::graphs_response::FutureDue;
 
 impl GraphsContext {
     pub(super) fn future_due(&self) -> FutureDue {
diff --git a/rslib/src/stats/graphs/hours.rs b/rslib/src/stats/graphs/hours.rs
index 8b1c46af7..b24e5082d 100644
--- a/rslib/src/stats/graphs/hours.rs
+++ b/rslib/src/stats/graphs/hours.rs
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::pb::stats::graphs_response::hours::Hour;
-use crate::pb::stats::graphs_response::Hours;
+use anki_proto::stats::graphs_response::hours::Hour;
+use anki_proto::stats::graphs_response::Hours;
+
 use crate::revlog::RevlogReviewKind;
 use crate::stats::graphs::GraphsContext;
 
@@ -38,23 +39,21 @@ impl GraphsContext {
             let review_secs = review.id.as_secs();
             let hour = (((review_secs.0 + self.local_offset_secs) / 3600) % 24) as usize;
             let correct = review.button_chosen > 1;
-            data.all_time[hour].increment(correct);
+            increment_count_for_hour(&mut data.all_time[hour], correct);
             for (stamp, bucket) in &mut conditional_buckets {
                 if &review_secs < stamp {
                     continue 'outer;
                 }
-                bucket[hour].increment(correct)
+                increment_count_for_hour(&mut bucket[hour], correct);
             }
         }
         data
     }
 }
 
-impl Hour {
-    pub(crate) fn increment(&mut self, correct: bool) {
-        self.total += 1;
-        if correct {
-            self.correct += 1;
-        }
+pub(crate) fn increment_count_for_hour(hour: &mut Hour, correct: bool) {
+    hour.total += 1;
+    if correct {
+        hour.correct += 1;
     }
 }
diff --git a/rslib/src/stats/graphs/intervals.rs b/rslib/src/stats/graphs/intervals.rs
index 4534044bb..f69c33052 100644
--- a/rslib/src/stats/graphs/intervals.rs
+++ b/rslib/src/stats/graphs/intervals.rs
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::Intervals;
+
 use crate::card::CardType;
-use crate::pb::stats::graphs_response::Intervals;
 use crate::stats::graphs::GraphsContext;
 
 impl GraphsContext {
diff --git a/rslib/src/stats/graphs/mod.rs b/rslib/src/stats/graphs/mod.rs
index a25ad9e8a..f900fd633 100644
--- a/rslib/src/stats/graphs/mod.rs
+++ b/rslib/src/stats/graphs/mod.rs
@@ -13,7 +13,6 @@ mod today;
 
 use crate::config::BoolKey;
 use crate::config::Weekday;
-use crate::pb;
 use crate::prelude::*;
 use crate::revlog::RevlogEntry;
 use crate::search::SortMode;
@@ -31,13 +30,13 @@
         &mut self,
         search: &str,
         days: u32,
-    ) -> Result<pb::stats::GraphsResponse> {
+    ) -> Result<anki_proto::stats::GraphsResponse> {
         let guard = self.search_cards_into_table(search, SortMode::NoOrder)?;
         let all = search.trim().is_empty();
         guard.col.graph_data(all, days)
     }
 
-    fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::stats::GraphsResponse> {
+    fn graph_data(&mut self, all: bool, days: u32) -> Result<anki_proto::stats::GraphsResponse> {
         let timing = self.timing_today()?;
         let revlog_start = if days > 0 {
             timing
@@ -61,7 +60,7 @@ impl Collection {
             next_day_start: timing.next_day_at,
             local_offset_secs,
         };
-        let resp = pb::stats::GraphsResponse {
+        let resp = anki_proto::stats::GraphsResponse {
             added: Some(ctx.added_days()),
             reviews: Some(ctx.review_counts_and_times()),
             future_due: Some(ctx.future_due()),
@@ -76,8 +75,8 @@ impl Collection {
         Ok(resp)
     }
 
-    pub(crate) fn get_graph_preferences(&self) -> pb::stats::GraphPreferences {
-        pb::stats::GraphPreferences {
+    pub(crate) fn get_graph_preferences(&self) -> anki_proto::stats::GraphPreferences {
+        anki_proto::stats::GraphPreferences {
             calendar_first_day_of_week: self.get_first_day_of_week() as i32,
             card_counts_separate_inactive: self
                 .get_config_bool(BoolKey::CardCountsSeparateInactive),
@@ -88,7 +87,7 @@ impl Collection {
 
     pub(crate) fn set_graph_preferences(
         &mut self,
-        prefs: pb::stats::GraphPreferences,
+        prefs: anki_proto::stats::GraphPreferences,
     ) -> Result<()> {
         self.set_first_day_of_week(match prefs.calendar_first_day_of_week {
             1 => Weekday::Monday,
diff --git a/rslib/src/stats/graphs/reviews.rs b/rslib/src/stats/graphs/reviews.rs
index 808b101e2..9db4b9e5d 100644
--- a/rslib/src/stats/graphs/reviews.rs
+++ b/rslib/src/stats/graphs/reviews.rs
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::stats::graphs_response::ReviewCountsAndTimes;
+
 use super::GraphsContext;
-use crate::pb::stats::graphs_response::ReviewCountsAndTimes;
 use crate::revlog::RevlogReviewKind;
 
 impl GraphsContext {
diff --git a/rslib/src/stats/graphs/today.rs b/rslib/src/stats/graphs/today.rs
index d4c9e96e2..e2544509a 100644
--- a/rslib/src/stats/graphs/today.rs
+++ b/rslib/src/stats/graphs/today.rs
@@ -1,7 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::pb::stats::graphs_response::Today;
+use anki_proto::stats::graphs_response::Today;
+
 use crate::revlog::RevlogReviewKind;
 use crate::stats::graphs::GraphsContext;
 
diff --git a/rslib/src/stats/today.rs b/rslib/src/stats/today.rs
index d7f82fbf2..f856ce271 100644
--- a/rslib/src/stats/today.rs
+++ b/rslib/src/stats/today.rs
@@ -1,7 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::i18n::I18n;
+use anki_i18n::I18n;
+
 use crate::prelude::*;
 use crate::scheduler::timespan::Timespan;
 
@@ -28,8 +29,9 @@ impl Collection {
 
 #[cfg(test)]
 mod test {
+    use anki_i18n::I18n;
+
     use super::studied_today;
-    use crate::i18n::I18n;
 
     #[test]
     fn today() {
diff --git a/rslib/src/storage/card/data.rs b/rslib/src/storage/card/data.rs
index d917c5c07..f66ffa699 100644
--- a/rslib/src/storage/card/data.rs
+++ b/rslib/src/storage/card/data.rs
@@ -8,8 +8,8 @@ use rusqlite::types::FromSqlError;
 use rusqlite::types::ToSqlOutput;
 use rusqlite::types::ValueRef;
 use rusqlite::ToSql;
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 use serde_json::Value;
 
 use crate::prelude::*;
diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs
index 1e3dbe038..3dd981b98 100644
--- a/rslib/src/storage/card/mod.rs
+++ b/rslib/src/storage/card/mod.rs
@@ -764,8 +764,9 @@ impl NewCardSorting {
 mod test {
     use std::path::Path;
 
+    use anki_i18n::I18n;
+
     use crate::card::Card;
-    use crate::i18n::I18n;
     use crate::storage::SqliteStorage;
 
     #[test]
diff --git a/rslib/src/storage/deckconfig/mod.rs b/rslib/src/storage/deckconfig/mod.rs
index 82f84443b..bfa4529bb 100644
--- a/rslib/src/storage/deckconfig/mod.rs
+++ b/rslib/src/storage/deckconfig/mod.rs
@@ -9,6 +9,7 @@ use rusqlite::Row;
 use serde_json::Value;
 
 use super::SqliteStorage;
+use crate::deckconfig::ensure_deck_config_values_valid;
 use crate::deckconfig::DeckConfSchema11;
 use crate::deckconfig::DeckConfig;
 use crate::deckconfig::DeckConfigId;
@@ -18,7 +19,7 @@ use crate::prelude::*;
 fn row_to_deckconf(row: &Row, fix_invalid: bool) -> Result<DeckConfig> {
     let mut config = DeckConfigInner::decode(row.get_ref_unwrap(4).as_blob()?)?;
     if fix_invalid {
-        config.ensure_values_valid();
+        ensure_deck_config_values_valid(&mut config);
     }
     Ok(DeckConfig {
         id: row.get(0)?,
diff --git a/rslib/src/sync/collection/protocol.rs b/rslib/src/sync/collection/protocol.rs
index 4844f07b9..fbeee3a01 100644
--- a/rslib/src/sync/collection/protocol.rs
+++ b/rslib/src/sync/collection/protocol.rs
@@ -5,8 +5,8 @@ use std::marker::PhantomData;
 
 use ammonia::Url;
 use async_trait::async_trait;
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 use strum::IntoStaticStr;
 
 use crate::prelude::TimestampMillis;
diff --git a/rslib/src/sync/collection/status.rs b/rslib/src/sync/collection/status.rs
index 9c9b30a11..b77fbf448 100644
--- a/rslib/src/sync/collection/status.rs
+++ b/rslib/src/sync/collection/status.rs
@@ -1,10 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::sync::sync_status_response;
 use tracing::debug;
 
 use crate::error::SyncErrorKind;
-use crate::pb::sync::sync_status_response;
 use crate::prelude::*;
 use crate::sync::collection::meta::SyncMeta;
 use crate::sync::collection::normal::ClientSyncState;
diff --git a/rslib/src/sync/login.rs b/rslib/src/sync/login.rs
index 1d51782be..a8805bc1b 100644
--- a/rslib/src/sync/login.rs
+++ b/rslib/src/sync/login.rs
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use reqwest::Url;
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 
 use crate::prelude::*;
 use crate::sync::collection::protocol::SyncProtocol;
@@ -35,7 +35,7 @@ pub async fn sync_login<S: Into<String>>(
     password: S,
     endpoint: Option<String>,
 ) -> Result<SyncAuth> {
-    let auth = crate::pb::sync::SyncAuth {
+    let auth = anki_proto::sync::SyncAuth {
         endpoint,
         ..Default::default()
     }
diff --git a/rslib/src/sync/media/begin.rs b/rslib/src/sync/media/begin.rs
index e76492520..976c5b55a 100644
--- a/rslib/src/sync/media/begin.rs
+++ b/rslib/src/sync/media/begin.rs
@@ -1,8 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 
 use crate::prelude::*;
 
diff --git a/rslib/src/sync/media/changes.rs b/rslib/src/sync/media/changes.rs
index ca3c5451c..5f94e8553 100644
--- a/rslib/src/sync/media/changes.rs
+++ b/rslib/src/sync/media/changes.rs
@@ -1,8 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 use serde_tuple::Serialize_tuple;
 use tracing::debug;
 
diff --git a/rslib/src/sync/media/protocol.rs b/rslib/src/sync/media/protocol.rs
index a9733ceb9..f68c9e8c6 100644
--- a/rslib/src/sync/media/protocol.rs
+++ b/rslib/src/sync/media/protocol.rs
@@ -4,8 +4,8 @@
 use async_trait::async_trait;
 use reqwest::Url;
 use serde::de::DeserializeOwned;
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 use strum::IntoStaticStr;
 
 use crate::error;
diff --git a/rslib/src/sync/media/sanity.rs b/rslib/src/sync/media/sanity.rs
index ee6fe0b3c..d09ee38ec 100644
--- a/rslib/src/sync/media/sanity.rs
+++ b/rslib/src/sync/media/sanity.rs
@@ -1,8 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 
 #[derive(Serialize, Deserialize)]
 pub struct SanityCheckRequest {
diff --git a/rslib/src/sync/media/upload.rs b/rslib/src/sync/media/upload.rs
index 499a2c066..1abef5d85 100644
--- a/rslib/src/sync/media/upload.rs
+++ b/rslib/src/sync/media/upload.rs
@@ -4,7 +4,7 @@
 use std::borrow::Cow;
 use std::path::Path;
 
-use serde_derive::Deserialize;
+use serde::Deserialize;
 use serde_tuple::Serialize_tuple;
 use tracing::debug;
 
diff --git a/rslib/src/sync/request/header_and_stream.rs b/rslib/src/sync/request/header_and_stream.rs
index a4e0dd81d..07878ece2 100644
--- a/rslib/src/sync/request/header_and_stream.rs
+++ b/rslib/src/sync/request/header_and_stream.rs
@@ -16,8 +16,8 @@ use bytes::Bytes;
 use futures::Stream;
 use futures::TryStreamExt;
 use serde::de::DeserializeOwned;
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 use tokio::io::AsyncReadExt;
 use tokio_util::io::ReaderStream;
 
diff --git a/rslib/src/sync/version.rs b/rslib/src/sync/version.rs
index ff67b5e17..80098dcff 100644
--- a/rslib/src/sync/version.rs
+++ b/rslib/src/sync/version.rs
@@ -1,8 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use serde_derive::Deserialize;
-use serde_derive::Serialize;
+use serde::Deserialize;
+use serde::Serialize;
 
 use crate::storage::SchemaVersion;
 use crate::sync::error::HttpResult;
diff --git a/rslib/src/tags/tree.rs b/rslib/src/tags/tree.rs
index 0efeed63b..f989cb39d 100644
--- a/rslib/src/tags/tree.rs
+++ b/rslib/src/tags/tree.rs
@@ -4,11 +4,11 @@
 use std::collections::HashSet;
 use std::iter::Peekable;
 
+use anki_proto::tags::TagTreeNode;
 use unicase::UniCase;
 
 use super::immediate_parent_name_unicase;
 use super::Tag;
-use crate::pb::tags::TagTreeNode;
 use crate::prelude::*;
 
 impl Collection {
diff --git a/rslib/src/template.rs b/rslib/src/template.rs
index a3acfa300..0b4705253 100644
--- a/rslib/src/template.rs
+++ b/rslib/src/template.rs
@@ -7,6 +7,7 @@ use std::collections::HashSet;
 use std::fmt::Write;
 use std::iter;
 
+use anki_i18n::I18n;
 use lazy_static::lazy_static;
 use nom::branch::alt;
 use nom::bytes::complete::tag;
@@ -21,7 +22,6 @@ use crate::cloze::add_cloze_numbers_in_string;
 use crate::error::AnkiError;
 use crate::error::Result;
 use crate::error::TemplateError;
-use crate::i18n::I18n;
 use crate::template_filters::apply_filters;
 
 pub type FieldMap<'a> = HashMap<&'a str, u16>;
@@ -846,11 +846,12 @@ fn is_cloze_conditional(key: &str) -> bool {
 mod test {
     use std::collections::HashMap;
 
+    use anki_i18n::I18n;
+
     use super::FieldMap;
     use super::ParsedNode::*;
     use super::ParsedTemplate as PT;
     use crate::error::TemplateError;
-    use crate::i18n::I18n;
     use crate::template::field_is_empty;
     use crate::template::nonempty_fields;
     use crate::template::FieldRequirements;
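
Note on the recurring pattern in the stats hunks above (buttons.rs, card_counts.rs, hours.rs): inherent methods on prost-generated message types are replaced with free functions because those types are now generated in the separate anki_proto crate, and Rust only permits inherent impls in the crate that defines the type. The following is a standalone sketch of that pattern, not part of the patch; Hour here is a local stand-in for the generated anki_proto::stats::graphs_response::hours::Hour message.

// Hedged sketch: a placeholder struct mirroring the generated message's fields.
#[derive(Debug, Default, PartialEq)]
struct Hour {
    total: u32,
    correct: u32,
}

// Free function in place of the former inherent `Hour::increment`, mirroring
// increment_count_for_hour() from rslib/src/stats/graphs/hours.rs.
fn increment_count_for_hour(hour: &mut Hour, correct: bool) {
    hour.total += 1;
    if correct {
        hour.correct += 1;
    }
}

fn main() {
    let mut hour = Hour::default();
    increment_count_for_hour(&mut hour, true);
    increment_count_for_hour(&mut hour, false);
    assert_eq!(hour, Hour { total: 2, correct: 1 });
}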