diff --git a/Cargo.lock b/Cargo.lock
index ae2b35897..c681ec5a0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2583,9 +2583,9 @@ dependencies = [
 
 [[package]]
 name = "prost-build"
-version = "0.11.3"
+version = "0.11.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e330bf1316db56b12c2bcfa399e8edddd4821965ea25ddb2c134b610b1c1c604"
+checksum = "276470f7f281b0ed53d2ae42dd52b4a8d08853a3c70e7fe95882acbb98a6ae94"
 dependencies = [
  "bytes",
  "heck",
diff --git a/build/configure/src/python.rs b/build/configure/src/python.rs
index 132112e78..32bc9f905 100644
--- a/build/configure/src/python.rs
+++ b/build/configure/src/python.rs
@@ -160,7 +160,7 @@ impl BuildAction for GenPythonProto {
         -Iproto $in"
     }
 
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
+    fn files(&mut self, build: &mut impl FilesHandle) {
         let proto_inputs = build.expand_inputs(&self.proto_files);
         let python_outputs: Vec<_> = proto_inputs
             .iter()
diff --git a/build/runner/src/bundle/folder.rs b/build/runner/src/bundle/folder.rs
index ef32627ff..1600981b2 100644
--- a/build/runner/src/bundle/folder.rs
+++ b/build/runner/src/bundle/folder.rs
@@ -94,7 +94,7 @@ fn copy_windows_extras(folder_root: &Utf8Path) {
 
 fn clean_top_level_files(folder_root: &Utf8Path) {
     let mut to_remove = vec![];
-    for entry in std::fs::read_dir(folder_root).unwrap() {
+    for entry in fs::read_dir(folder_root).unwrap() {
         let entry = entry.unwrap();
         if entry.file_name() == "lib" {
             continue;
@@ -104,9 +104,9 @@ fn clean_top_level_files(folder_root: &Utf8Path) {
     }
     for path in to_remove {
         if path.is_dir() {
-            std::fs::remove_dir_all(path).unwrap()
+            fs::remove_dir_all(path).unwrap()
         } else {
-            std::fs::remove_file(path).unwrap()
+            fs::remove_file(path).unwrap()
         }
     }
 }
diff --git a/docs/editing.md b/docs/editing.md
index 3c905a8e0..ba3fd6fce 100644
--- a/docs/editing.md
+++ b/docs/editing.md
@@ -49,12 +49,6 @@ see and install a number of recommended extensions.
 If you decide to use PyCharm instead of VS Code, there are somethings to be
 aware of.
 
-### Slowdowns
-
-Excluding the node_modules folder inside the editor may improve performance:
-
-https://intellij-support.jetbrains.com/hc/en-us/community/posts/115000721750-Excluding-directories-globally
-
 ### Pylib References
 
 You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a
diff --git a/qt/bundle/mac/src/main.rs b/qt/bundle/mac/src/main.rs
index 350e9ba29..d5f89ff35 100644
--- a/qt/bundle/mac/src/main.rs
+++ b/qt/bundle/mac/src/main.rs
@@ -90,7 +90,7 @@ enum Commands {
     BuildDmgs(BuildDmgsArgs),
 }
 
-fn main() -> anyhow::Result<()> {
+fn main() -> Result<()> {
     match Cli::parse().command {
         Commands::BuildApp {
             version,
@@ -215,7 +215,7 @@ fn fix_rpath(exe_path: Utf8PathBuf) -> Result<()> {
 
 fn get_plist(anki_version: &str) -> plist::Dictionary {
     let reader = std::io::Cursor::new(include_bytes!("Info.plist"));
-    let mut plist = plist::Value::from_reader(reader)
+    let mut plist = Value::from_reader(reader)
         .unwrap()
         .into_dictionary()
         .unwrap();
diff --git a/qt/bundle/win/src/main.rs b/qt/bundle/win/src/main.rs
index feb979d57..777a86d68 100644
--- a/qt/bundle/win/src/main.rs
+++ b/qt/bundle/win/src/main.rs
@@ -17,7 +17,7 @@ struct Args {
     qt5_setup_path: Utf8PathBuf,
 }
 
-fn main() -> anyhow::Result<()> {
+fn main() -> Result<()> {
     let args = Args::parse();
 
     let src_win_folder = Utf8Path::new("qt/bundle/win");
diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml
index 1513827f1..92896b7f2 100644
--- a/rslib/Cargo.toml
+++ b/rslib/Cargo.toml
@@ -27,7 +27,7 @@ required-features = ["bench"]
 # After updating anything below, run ../cargo/update.py
 
 [build-dependencies]
-prost-build = "0.11.3"
+prost-build = "0.11.4"
 which = "4.3.0"
 
 [dev-dependencies]
diff --git a/rslib/i18n/build/check.rs b/rslib/i18n/build/check.rs
index 4d4afe773..c453c8ebc 100644
--- a/rslib/i18n/build/check.rs
+++ b/rslib/i18n/build/check.rs
@@ -3,10 +3,11 @@
 
 //! Check the .ftl files at build time to ensure we don't get runtime load failures.
 
-use super::gather::TranslationsByLang;
 use fluent::{FluentBundle, FluentResource};
 use unic_langid::LanguageIdentifier;
 
+use super::gather::TranslationsByLang;
+
 pub fn check(lang_map: &TranslationsByLang) {
     for (lang, files_map) in lang_map {
         for (fname, content) in files_map {
diff --git a/rslib/i18n/src/lib.rs b/rslib/i18n/src/lib.rs
index b89152056..0650187b0 100644
--- a/rslib/i18n/src/lib.rs
+++ b/rslib/i18n/src/lib.rs
@@ -362,24 +362,21 @@ fn want_comma_as_decimal_separator(langs: &[LanguageIdentifier]) -> bool {
 }
 
 fn format_decimal_with_comma(
-    val: &fluent::FluentValue,
+    val: &FluentValue,
     _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
 ) -> Option {
     format_number_values(val, Some(","))
 }
 
 fn format_decimal_with_period(
-    val: &fluent::FluentValue,
+    val: &FluentValue,
     _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
 ) -> Option {
     format_number_values(val, None)
 }
 
 #[inline]
-fn format_number_values(
-    val: &fluent::FluentValue,
-    alt_separator: Option<&'static str>,
-) -> Option {
+fn format_number_values(val: &FluentValue, alt_separator: Option<&'static str>) -> Option {
     match val {
         FluentValue::Number(num) => {
             // create a string with desired maximum digits
diff --git a/rslib/i18n_helpers/src/serialize.rs b/rslib/i18n_helpers/src/serialize.rs
index f05479a4e..a0e5d8021 100644
--- a/rslib/i18n_helpers/src/serialize.rs
+++ b/rslib/i18n_helpers/src/serialize.rs
@@ -3,9 +3,10 @@
 
 // copied from https://github.com/projectfluent/fluent-rs/pull/241
 
-use fluent_syntax::{ast::*, parser::Slice};
 use std::fmt::{self, Error, Write};
 
+use fluent_syntax::{ast::*, parser::Slice};
+
 pub fn serialize<'s, S: Slice<'s>>(resource: &Resource) -> String {
     serialize_with_options(resource, Options::default())
 }
diff --git a/rslib/src/backend/adding.rs b/rslib/src/backend/adding.rs
index 2f81c056b..515e1e79c 100644
--- a/rslib/src/backend/adding.rs
+++ b/rslib/src/backend/adding.rs
@@ -1,7 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::{adding::DeckAndNotetype, pb::DeckAndNotetype as DeckAndNotetypeProto};
+use crate::{adding::DeckAndNotetype, pb::notes::DeckAndNotetype as DeckAndNotetypeProto};
 
 impl From for DeckAndNotetypeProto {
     fn from(s: DeckAndNotetype) -> Self {
diff --git a/rslib/src/backend/card.rs b/rslib/src/backend/card.rs
index 7736985f5..324691d27 100644
--- a/rslib/src/backend/card.rs
+++ b/rslib/src/backend/card.rs
@@ -2,7 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use super::Backend;
-pub(super) use crate::pb::cards_service::Service as CardsService;
+pub(super) use crate::pb::cards::cards_service::Service as CardsService;
 use crate::{
     card::{CardQueue, CardType},
     pb,
@@ -10,7 +10,7 @@ use crate::{
 };
 
 impl CardsService for Backend {
-    fn get_card(&self, input: pb::CardId) -> Result {
+    fn get_card(&self, input: pb::cards::CardId) -> Result {
         let cid = input.into();
         self.with_col(|col| {
             col.storage
@@ -20,7 +20,10 @@ impl CardsService for Backend {
         })
     }
 
-    fn update_cards(&self, input: pb::UpdateCardsRequest) -> Result {
+    fn update_cards(
+        &self,
+        input: pb::cards::UpdateCardsRequest,
+    ) -> Result {
         self.with_col(|col| {
             let cards = input
                 .cards
@@ -35,7 +38,7 @@ impl CardsService for Backend {
         .map(Into::into)
     }
 
-    fn remove_cards(&self, input: pb::RemoveCardsRequest) -> Result {
+    fn remove_cards(&self, input: pb::cards::RemoveCardsRequest) -> Result {
         self.with_col(|col| {
             col.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes( @@ -50,13 +53,19 @@ impl CardsService for Backend { }) } - fn set_deck(&self, input: pb::SetDeckRequest) -> Result { + fn set_deck( + &self, + input: pb::cards::SetDeckRequest, + ) -> Result { let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect(); let deck_id = input.deck_id.into(); self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into)) } - fn set_flag(&self, input: pb::SetFlagRequest) -> Result { + fn set_flag( + &self, + input: pb::cards::SetFlagRequest, + ) -> Result { self.with_col(|col| { col.set_card_flag(&to_card_ids(input.card_ids), input.flag) .map(Into::into) @@ -64,10 +73,10 @@ impl CardsService for Backend { } } -impl TryFrom for Card { +impl TryFrom for Card { type Error = AnkiError; - fn try_from(c: pb::Card) -> Result { + fn try_from(c: pb::cards::Card) -> Result { let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?; let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?; Ok(Card { @@ -94,9 +103,9 @@ impl TryFrom for Card { } } -impl From for pb::Card { +impl From for pb::cards::Card { fn from(c: Card) -> Self { - pb::Card { + pb::cards::Card { id: c.id.0, note_id: c.note_id.0, deck_id: c.deck_id.0, diff --git a/rslib/src/backend/cardrendering.rs b/rslib/src/backend/cardrendering.rs index 252be16cc..2cf51e1ae 100644 --- a/rslib/src/backend/cardrendering.rs +++ b/rslib/src/backend/cardrendering.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::Backend; -pub(super) use crate::pb::cardrendering_service::Service as CardRenderingService; +pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService; use crate::{ card_rendering::{extract_av_tags, strip_av_tags}, latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex}, @@ -21,16 +21,19 @@ use crate::{ impl CardRenderingService for Backend { fn extract_av_tags( &self, - input: pb::ExtractAvTagsRequest, - ) -> Result { + input: pb::card_rendering::ExtractAvTagsRequest, + ) -> Result { let out = extract_av_tags(input.text, input.question_side, self.i18n()); - Ok(pb::ExtractAvTagsResponse { + Ok(pb::card_rendering::ExtractAvTagsResponse { text: out.0, av_tags: out.1, }) } - fn extract_latex(&self, input: pb::ExtractLatexRequest) -> Result { + fn extract_latex( + &self, + input: pb::card_rendering::ExtractLatexRequest, + ) -> Result { let func = if input.expand_clozes { extract_latex_expanding_clozes } else { @@ -38,11 +41,11 @@ impl CardRenderingService for Backend { }; let (text, extracted) = func(&input.text, input.svg); - Ok(pb::ExtractLatexResponse { + Ok(pb::card_rendering::ExtractLatexResponse { text, latex: extracted .into_iter() - .map(|e: ExtractedLatex| pb::ExtractedLatex { + .map(|e: ExtractedLatex| pb::card_rendering::ExtractedLatex { filename: e.fname, latex_body: e.latex, }) @@ -50,7 +53,10 @@ impl CardRenderingService for Backend { }) } - fn get_empty_cards(&self, _input: pb::Empty) -> Result { + fn get_empty_cards( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| { let mut empty = col.empty_cards()?; let report = col.empty_cards_report(&mut empty)?; @@ -58,14 +64,14 @@ impl CardRenderingService for Backend { let mut outnotes = vec![]; for (_ntid, notes) in empty { outnotes.extend(notes.into_iter().map(|e| { - pb::empty_cards_report::NoteWithEmptyCards { + pb::card_rendering::empty_cards_report::NoteWithEmptyCards { note_id: e.nid.0, will_delete_note: e.empty.len() == 
e.current_count, card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(), } })) } - Ok(pb::EmptyCardsReport { + Ok(pb::card_rendering::EmptyCardsReport { report, notes: outnotes, }) @@ -74,8 +80,8 @@ impl CardRenderingService for Backend { fn render_existing_card( &self, - input: pb::RenderExistingCardRequest, - ) -> Result { + input: pb::card_rendering::RenderExistingCardRequest, + ) -> Result { self.with_col(|col| { col.render_existing_card(CardId(input.card_id), input.browser) .map(Into::into) @@ -84,8 +90,8 @@ impl CardRenderingService for Backend { fn render_uncommitted_card( &self, - input: pb::RenderUncommittedCardRequest, - ) -> Result { + input: pb::card_rendering::RenderUncommittedCardRequest, + ) -> Result { let template = input.template.or_invalid("missing template")?.into(); let mut note = input.note.or_invalid("missing note")?.into(); let ord = input.card_ord as u16; @@ -98,8 +104,8 @@ impl CardRenderingService for Backend { fn render_uncommitted_card_legacy( &self, - input: pb::RenderUncommittedCardLegacyRequest, - ) -> Result { + input: pb::card_rendering::RenderUncommittedCardLegacyRequest, + ) -> Result { let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?; let template = schema11.into(); let mut note = input.note.or_invalid("missing note")?.into(); @@ -111,11 +117,14 @@ impl CardRenderingService for Backend { }) } - fn strip_av_tags(&self, input: pb::String) -> Result { + fn strip_av_tags(&self, input: pb::generic::String) -> Result { Ok(strip_av_tags(input.val).into()) } - fn render_markdown(&self, input: pb::RenderMarkdownRequest) -> Result { + fn render_markdown( + &self, + input: pb::card_rendering::RenderMarkdownRequest, + ) -> Result { let mut text = render_markdown(&input.markdown); if input.sanitize { // currently no images @@ -124,18 +133,21 @@ impl CardRenderingService for Backend { Ok(text.into()) } - fn encode_iri_paths(&self, input: pb::String) -> Result { + fn encode_iri_paths(&self, input: pb::generic::String) -> Result { Ok(encode_iri_paths(&input.val).to_string().into()) } - fn decode_iri_paths(&self, input: pb::String) -> Result { + fn decode_iri_paths(&self, input: pb::generic::String) -> Result { Ok(decode_iri_paths(&input.val).to_string().into()) } - fn strip_html(&self, input: pb::StripHtmlRequest) -> Result { + fn strip_html( + &self, + input: pb::card_rendering::StripHtmlRequest, + ) -> Result { Ok(match input.mode() { - pb::strip_html_request::Mode::Normal => strip_html(&input.text), - pb::strip_html_request::Mode::PreserveMediaFilenames => { + pb::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text), + pb::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => { strip_html_preserving_media_filenames(&input.text) } } @@ -143,38 +155,47 @@ impl CardRenderingService for Backend { .into()) } - fn compare_answer(&self, input: pb::CompareAnswerRequest) -> Result { + fn compare_answer( + &self, + input: pb::card_rendering::CompareAnswerRequest, + ) -> Result { Ok(compare_answer(&input.expected, &input.provided).into()) } } -fn rendered_nodes_to_proto(nodes: Vec) -> Vec { +fn rendered_nodes_to_proto( + nodes: Vec, +) -> Vec { nodes .into_iter() - .map(|n| pb::RenderedTemplateNode { + .map(|n| pb::card_rendering::RenderedTemplateNode { value: Some(rendered_node_to_proto(n)), }) .collect() } -fn rendered_node_to_proto(node: RenderedNode) -> pb::rendered_template_node::Value { +fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_template_node::Value { match node 
{ - RenderedNode::Text { text } => pb::rendered_template_node::Value::Text(text), + RenderedNode::Text { text } => { + pb::card_rendering::rendered_template_node::Value::Text(text) + } RenderedNode::Replacement { field_name, current_text, filters, - } => pb::rendered_template_node::Value::Replacement(pb::RenderedTemplateReplacement { - field_name, - current_text, - filters, - }), + } => pb::card_rendering::rendered_template_node::Value::Replacement( + pb::card_rendering::RenderedTemplateReplacement { + field_name, + current_text, + filters, + }, + ), } } -impl From for pb::RenderCardResponse { +impl From for pb::card_rendering::RenderCardResponse { fn from(o: RenderCardOutput) -> Self { - pb::RenderCardResponse { + pb::card_rendering::RenderCardResponse { question_nodes: rendered_nodes_to_proto(o.qnodes), answer_nodes: rendered_nodes_to_proto(o.anodes), css: o.css, diff --git a/rslib/src/backend/collection.rs b/rslib/src/backend/collection.rs index 201714e4d..16a49c669 100644 --- a/rslib/src/backend/collection.rs +++ b/rslib/src/backend/collection.rs @@ -6,7 +6,7 @@ use std::sync::MutexGuard; use slog::error; use super::{progress::Progress, Backend}; -pub(super) use crate::pb::collection_service::Service as CollectionService; +pub(super) use crate::pb::collection::collection_service::Service as CollectionService; use crate::{ backend::progress::progress_to_proto, collection::CollectionBuilder, @@ -16,17 +16,20 @@ use crate::{ }; impl CollectionService for Backend { - fn latest_progress(&self, _input: pb::Empty) -> Result { + fn latest_progress(&self, _input: pb::generic::Empty) -> Result { let progress = self.progress_state.lock().unwrap().last_progress; Ok(progress_to_proto(progress, &self.tr)) } - fn set_wants_abort(&self, _input: pb::Empty) -> Result { + fn set_wants_abort(&self, _input: pb::generic::Empty) -> Result { self.progress_state.lock().unwrap().want_abort = true; Ok(().into()) } - fn open_collection(&self, input: pb::OpenCollectionRequest) -> Result { + fn open_collection( + &self, + input: pb::collection::OpenCollectionRequest, + ) -> Result { let mut guard = self.lock_closed_collection()?; let mut builder = CollectionBuilder::new(input.collection_path); @@ -45,7 +48,10 @@ impl CollectionService for Backend { Ok(().into()) } - fn close_collection(&self, input: pb::CloseCollectionRequest) -> Result { + fn close_collection( + &self, + input: pb::collection::CloseCollectionRequest, + ) -> Result { let desired_version = if input.downgrade_to_schema11 { Some(SchemaVersion::V11) } else { @@ -63,42 +69,48 @@ impl CollectionService for Backend { Ok(().into()) } - fn check_database(&self, _input: pb::Empty) -> Result { + fn check_database( + &self, + _input: pb::generic::Empty, + ) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress, throttle| { handler.update(Progress::DatabaseCheck(progress), throttle); }; self.with_col(|col| { col.check_database(progress_fn) - .map(|problems| pb::CheckDatabaseResponse { + .map(|problems| pb::collection::CheckDatabaseResponse { problems: problems.to_i18n_strings(&col.tr), }) }) } - fn get_undo_status(&self, _input: pb::Empty) -> Result { + fn get_undo_status(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr))) } - fn undo(&self, _input: pb::Empty) -> Result { + fn undo(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr))) } - fn redo(&self, _input: pb::Empty) -> Result { + fn redo(&self, 
_input: pb::generic::Empty) -> Result { self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr))) } - fn add_custom_undo_entry(&self, input: pb::String) -> Result { + fn add_custom_undo_entry(&self, input: pb::generic::String) -> Result { self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into())) } - fn merge_undo_entries(&self, input: pb::UInt32) -> Result { + fn merge_undo_entries(&self, input: pb::generic::UInt32) -> Result { let starting_from = input.val as usize; self.with_col(|col| col.merge_undoable_ops(starting_from)) .map(Into::into) } - fn create_backup(&self, input: pb::CreateBackupRequest) -> Result { + fn create_backup( + &self, + input: pb::collection::CreateBackupRequest, + ) -> Result { // lock collection let mut col_lock = self.lock_open_collection()?; let col = col_lock.as_mut().unwrap(); @@ -122,7 +134,7 @@ impl CollectionService for Backend { Ok(created.into()) } - fn await_backup_completion(&self, _input: pb::Empty) -> Result { + fn await_backup_completion(&self, _input: pb::generic::Empty) -> Result { self.await_backup_completion()?; Ok(().into()) } diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index 2b3641768..9b41030bc 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -4,11 +4,11 @@ use serde_json::Value; use super::Backend; -pub(super) use crate::pb::config_service::Service as ConfigService; +pub(super) use crate::pb::config::config_service::Service as ConfigService; use crate::{ config::{BoolKey, StringKey}, pb, - pb::config_key::{Bool as BoolKeyProto, String as StringKeyProto}, + pb::config::config_key::{Bool as BoolKeyProto, String as StringKeyProto}, prelude::*, }; @@ -54,7 +54,7 @@ impl From for StringKey { } impl ConfigService for Backend { - fn get_config_json(&self, input: pb::String) -> Result { + fn get_config_json(&self, input: pb::generic::String) -> Result { self.with_col(|col| { let val: Option = col.get_config_optional(input.val.as_str()); val.or_not_found(input.val) @@ -63,7 +63,10 @@ impl ConfigService for Backend { }) } - fn set_config_json(&self, input: pb::SetConfigJsonRequest) -> Result { + fn set_config_json( + &self, + input: pb::config::SetConfigJsonRequest, + ) -> Result { self.with_col(|col| { let val: Value = serde_json::from_slice(&input.value_json)?; col.set_config_json(input.key.as_str(), &val, input.undoable) @@ -71,7 +74,10 @@ impl ConfigService for Backend { .map(Into::into) } - fn set_config_json_no_undo(&self, input: pb::SetConfigJsonRequest) -> Result { + fn set_config_json_no_undo( + &self, + input: pb::config::SetConfigJsonRequest, + ) -> Result { self.with_col(|col| { let val: Value = serde_json::from_slice(&input.value_json)?; col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ())) @@ -79,12 +85,12 @@ impl ConfigService for Backend { .map(Into::into) } - fn remove_config(&self, input: pb::String) -> Result { + fn remove_config(&self, input: pb::generic::String) -> Result { self.with_col(|col| col.remove_config(input.val.as_str())) .map(Into::into) } - fn get_all_config(&self, _input: pb::Empty) -> Result { + fn get_all_config(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| { let conf = col.storage.get_all_config()?; serde_json::to_vec(&conf).map_err(Into::into) @@ -92,37 +98,49 @@ impl ConfigService for Backend { .map(Into::into) } - fn get_config_bool(&self, input: pb::GetConfigBoolRequest) -> Result { + fn get_config_bool( + &self, + input: pb::config::GetConfigBoolRequest, + ) -> Result { 
self.with_col(|col| { - Ok(pb::Bool { + Ok(pb::generic::Bool { val: col.get_config_bool(input.key().into()), }) }) } - fn set_config_bool(&self, input: pb::SetConfigBoolRequest) -> Result { + fn set_config_bool( + &self, + input: pb::config::SetConfigBoolRequest, + ) -> Result { self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable)) .map(Into::into) } - fn get_config_string(&self, input: pb::GetConfigStringRequest) -> Result { + fn get_config_string( + &self, + input: pb::config::GetConfigStringRequest, + ) -> Result { self.with_col(|col| { - Ok(pb::String { + Ok(pb::generic::String { val: col.get_config_string(input.key().into()), }) }) } - fn set_config_string(&self, input: pb::SetConfigStringRequest) -> Result { + fn set_config_string( + &self, + input: pb::config::SetConfigStringRequest, + ) -> Result { self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable)) .map(Into::into) } - fn get_preferences(&self, _input: pb::Empty) -> Result { + fn get_preferences(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.get_preferences()) } - fn set_preferences(&self, input: pb::Preferences) -> Result { + fn set_preferences(&self, input: pb::config::Preferences) -> Result { self.with_col(|col| col.set_preferences(input)) .map(Into::into) } diff --git a/rslib/src/backend/deckconfig.rs b/rslib/src/backend/deckconfig.rs index 5c3953902..b6e6f8c45 100644 --- a/rslib/src/backend/deckconfig.rs +++ b/rslib/src/backend/deckconfig.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::Backend; -pub(super) use crate::pb::deckconfig_service::Service as DeckConfigService; +pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService; use crate::{ deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsRequest}, pb, @@ -10,19 +10,22 @@ use crate::{ }; impl DeckConfigService for Backend { - fn add_or_update_deck_config_legacy(&self, input: pb::Json) -> Result { + fn add_or_update_deck_config_legacy( + &self, + input: pb::generic::Json, + ) -> Result { let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?; let mut conf: DeckConfig = conf.into(); self.with_col(|col| { col.transact_no_undo(|col| { col.add_or_update_deck_config_legacy(&mut conf)?; - Ok(pb::DeckConfigId { dcid: conf.id.0 }) + Ok(pb::deckconfig::DeckConfigId { dcid: conf.id.0 }) }) }) .map(Into::into) } - fn all_deck_config_legacy(&self, _input: pb::Empty) -> Result { + fn all_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| { let conf: Vec = col .storage @@ -35,11 +38,17 @@ impl DeckConfigService for Backend { .map(Into::into) } - fn get_deck_config(&self, input: pb::DeckConfigId) -> Result { + fn get_deck_config( + &self, + input: pb::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into())) } - fn get_deck_config_legacy(&self, input: pb::DeckConfigId) -> Result { + fn get_deck_config_legacy( + &self, + input: pb::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| { let conf = col.get_deck_config(input.into(), true)?.unwrap(); let conf: DeckConfSchema11 = conf.into(); @@ -48,30 +57,39 @@ impl DeckConfigService for Backend { .map(Into::into) } - fn new_deck_config_legacy(&self, _input: pb::Empty) -> Result { + fn new_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result { serde_json::to_vec(&DeckConfSchema11::default()) .map_err(Into::into) 
.map(Into::into) } - fn remove_deck_config(&self, input: pb::DeckConfigId) -> Result { + fn remove_deck_config( + &self, + input: pb::deckconfig::DeckConfigId, + ) -> Result { self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into()))) .map(Into::into) } - fn get_deck_configs_for_update(&self, input: pb::DeckId) -> Result { + fn get_deck_configs_for_update( + &self, + input: pb::decks::DeckId, + ) -> Result { self.with_col(|col| col.get_deck_configs_for_update(input.into())) } - fn update_deck_configs(&self, input: pb::UpdateDeckConfigsRequest) -> Result { + fn update_deck_configs( + &self, + input: pb::deckconfig::UpdateDeckConfigsRequest, + ) -> Result { self.with_col(|col| col.update_deck_configs(input.into())) .map(Into::into) } } -impl From for pb::DeckConfig { +impl From for pb::deckconfig::DeckConfig { fn from(c: DeckConfig) -> Self { - pb::DeckConfig { + pb::deckconfig::DeckConfig { id: c.id.0, name: c.name, mtime_secs: c.mtime_secs.0, @@ -81,8 +99,8 @@ impl From for pb::DeckConfig { } } -impl From for UpdateDeckConfigsRequest { - fn from(c: pb::UpdateDeckConfigsRequest) -> Self { +impl From for UpdateDeckConfigsRequest { + fn from(c: pb::deckconfig::UpdateDeckConfigsRequest) -> Self { UpdateDeckConfigsRequest { target_deck_id: c.target_deck_id.into(), configs: c.configs.into_iter().map(Into::into).collect(), @@ -94,8 +112,8 @@ impl From for UpdateDeckConfigsRequest { } } -impl From for DeckConfig { - fn from(c: pb::DeckConfig) -> Self { +impl From for DeckConfig { + fn from(c: pb::deckconfig::DeckConfig) -> Self { DeckConfig { id: c.id.into(), name: c.name, diff --git a/rslib/src/backend/decks.rs b/rslib/src/backend/decks.rs index aba633e7c..162d9631c 100644 --- a/rslib/src/backend/decks.rs +++ b/rslib/src/backend/decks.rs @@ -4,7 +4,7 @@ use std::convert::TryFrom; use super::Backend; -pub(super) use crate::pb::decks_service::Service as DecksService; +pub(super) use crate::pb::decks::decks_service::Service as DecksService; use crate::{ decks::{DeckSchema11, FilteredSearchOrder}, pb::{self as pb}, @@ -13,16 +13,16 @@ use crate::{ }; impl DecksService for Backend { - fn new_deck(&self, _input: pb::Empty) -> Result { + fn new_deck(&self, _input: pb::generic::Empty) -> Result { Ok(Deck::new_normal().into()) } - fn add_deck(&self, deck: pb::Deck) -> Result { + fn add_deck(&self, deck: pb::decks::Deck) -> Result { let mut deck: Deck = deck.try_into()?; self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into())) } - fn add_deck_legacy(&self, input: pb::Json) -> Result { + fn add_deck_legacy(&self, input: pb::generic::Json) -> Result { let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?; let mut deck: Deck = schema11.into(); self.with_col(|col| { @@ -33,8 +33,8 @@ impl DecksService for Backend { fn add_or_update_deck_legacy( &self, - input: pb::AddOrUpdateDeckLegacyRequest, - ) -> Result { + input: pb::decks::AddOrUpdateDeckLegacyRequest, + ) -> Result { self.with_col(|col| { let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?; let mut deck: Deck = schema11.into(); @@ -46,11 +46,11 @@ impl DecksService for Backend { } else { col.add_or_update_deck(&mut deck)?; } - Ok(pb::DeckId { did: deck.id.0 }) + Ok(pb::decks::DeckId { did: deck.id.0 }) }) } - fn deck_tree(&self, input: pb::DeckTreeRequest) -> Result { + fn deck_tree(&self, input: pb::decks::DeckTreeRequest) -> Result { self.with_col(|col| { let now = if input.now == 0 { None @@ -61,7 +61,7 @@ impl DecksService for Backend { }) } - fn deck_tree_legacy(&self, 
_input: pb::Empty) -> Result { + fn deck_tree_legacy(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| { let tree = col.legacy_deck_tree()?; serde_json::to_vec(&tree) @@ -70,7 +70,7 @@ impl DecksService for Backend { }) } - fn get_all_decks_legacy(&self, _input: pb::Empty) -> Result { + fn get_all_decks_legacy(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| { let decks = col.storage.get_all_decks_as_schema11()?; serde_json::to_vec(&decks).map_err(Into::into) @@ -78,26 +78,28 @@ impl DecksService for Backend { .map(Into::into) } - fn get_deck_id_by_name(&self, input: pb::String) -> Result { + fn get_deck_id_by_name(&self, input: pb::generic::String) -> Result { self.with_col(|col| { - col.get_deck_id(&input.val) - .and_then(|d| d.or_not_found(input.val).map(|d| pb::DeckId { did: d.0 })) + col.get_deck_id(&input.val).and_then(|d| { + d.or_not_found(input.val) + .map(|d| pb::decks::DeckId { did: d.0 }) + }) }) } - fn get_deck(&self, input: pb::DeckId) -> Result { + fn get_deck(&self, input: pb::decks::DeckId) -> Result { let did = input.into(); self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into())) } - fn update_deck(&self, input: pb::Deck) -> Result { + fn update_deck(&self, input: pb::decks::Deck) -> Result { self.with_col(|col| { let mut deck = Deck::try_from(input)?; col.update_deck(&mut deck).map(Into::into) }) } - fn update_deck_legacy(&self, input: pb::Json) -> Result { + fn update_deck_legacy(&self, input: pb::generic::Json) -> Result { self.with_col(|col| { let deck: DeckSchema11 = serde_json::from_slice(&input.json)?; let mut deck = deck.into(); @@ -105,7 +107,7 @@ impl DecksService for Backend { }) } - fn get_deck_legacy(&self, input: pb::DeckId) -> Result { + fn get_deck_legacy(&self, input: pb::decks::DeckId) -> Result { let did = input.into(); self.with_col(|col| { let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into(); @@ -115,7 +117,10 @@ impl DecksService for Backend { }) } - fn get_deck_names(&self, input: pb::GetDeckNamesRequest) -> Result { + fn get_deck_names( + &self, + input: pb::decks::GetDeckNamesRequest, + ) -> Result { self.with_col(|col| { let names = if input.include_filtered { col.get_all_deck_names(input.skip_empty_default)? 
@@ -126,14 +131,14 @@ impl DecksService for Backend { }) } - fn get_deck_and_child_names(&self, input: pb::DeckId) -> Result { + fn get_deck_and_child_names(&self, input: pb::decks::DeckId) -> Result { self.with_col(|col| { col.get_deck_and_child_names(input.did.into()) .map(Into::into) }) } - fn new_deck_legacy(&self, input: pb::Bool) -> Result { + fn new_deck_legacy(&self, input: pb::generic::Bool) -> Result { let deck = if input.val { Deck::new_filtered() } else { @@ -145,12 +150,18 @@ impl DecksService for Backend { .map(Into::into) } - fn remove_decks(&self, input: pb::DeckIds) -> Result { + fn remove_decks( + &self, + input: pb::decks::DeckIds, + ) -> Result { self.with_col(|col| col.remove_decks_and_child_decks(&Into::>::into(input))) .map(Into::into) } - fn reparent_decks(&self, input: pb::ReparentDecksRequest) -> Result { + fn reparent_decks( + &self, + input: pb::decks::ReparentDecksRequest, + ) -> Result { let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect(); let new_parent = if input.new_parent == 0 { None @@ -161,68 +172,80 @@ impl DecksService for Backend { .map(Into::into) } - fn rename_deck(&self, input: pb::RenameDeckRequest) -> Result { + fn rename_deck( + &self, + input: pb::decks::RenameDeckRequest, + ) -> Result { self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name)) .map(Into::into) } - fn get_or_create_filtered_deck(&self, input: pb::DeckId) -> Result { + fn get_or_create_filtered_deck( + &self, + input: pb::decks::DeckId, + ) -> Result { self.with_col(|col| col.get_or_create_filtered_deck(input.into())) .map(Into::into) } fn add_or_update_filtered_deck( &self, - input: pb::FilteredDeckForUpdate, - ) -> Result { + input: pb::decks::FilteredDeckForUpdate, + ) -> Result { self.with_col(|col| col.add_or_update_filtered_deck(input.into())) .map(|out| out.map(i64::from)) .map(Into::into) } - fn filtered_deck_order_labels(&self, _input: pb::Empty) -> Result { + fn filtered_deck_order_labels( + &self, + _input: pb::generic::Empty, + ) -> Result { Ok(FilteredSearchOrder::labels(&self.tr).into()) } - fn set_deck_collapsed(&self, input: pb::SetDeckCollapsedRequest) -> Result { + fn set_deck_collapsed( + &self, + input: pb::decks::SetDeckCollapsedRequest, + ) -> Result { self.with_col(|col| { col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope()) }) .map(Into::into) } - fn set_current_deck(&self, input: pb::DeckId) -> Result { + fn set_current_deck(&self, input: pb::decks::DeckId) -> Result { self.with_col(|col| col.set_current_deck(input.did.into())) .map(Into::into) } - fn get_current_deck(&self, _input: pb::Empty) -> Result { + fn get_current_deck(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.get_current_deck()) .map(|deck| (*deck).clone().into()) } } -impl From for DeckId { - fn from(did: pb::DeckId) -> Self { +impl From for DeckId { + fn from(did: pb::decks::DeckId) -> Self { DeckId(did.did) } } -impl From for Vec { - fn from(dids: pb::DeckIds) -> Self { +impl From for Vec { + fn from(dids: pb::decks::DeckIds) -> Self { dids.dids.into_iter().map(DeckId).collect() } } -impl From for pb::DeckId { +impl From for pb::decks::DeckId { fn from(did: DeckId) -> Self { - pb::DeckId { did: did.0 } + pb::decks::DeckId { did: did.0 } } } -impl From for pb::FilteredDeckForUpdate { +impl From for pb::decks::FilteredDeckForUpdate { fn from(deck: FilteredDeckForUpdate) -> Self { - pb::FilteredDeckForUpdate { + pb::decks::FilteredDeckForUpdate { id: deck.id.into(), name: deck.human_name, config: 
Some(deck.config), @@ -230,8 +253,8 @@ impl From for pb::FilteredDeckForUpdate { } } -impl From for FilteredDeckForUpdate { - fn from(deck: pb::FilteredDeckForUpdate) -> Self { +impl From for FilteredDeckForUpdate { + fn from(deck: pb::decks::FilteredDeckForUpdate) -> Self { FilteredDeckForUpdate { id: deck.id.into(), human_name: deck.name, @@ -240,9 +263,9 @@ impl From for FilteredDeckForUpdate { } } -impl From for pb::Deck { +impl From for pb::decks::Deck { fn from(d: Deck) -> Self { - pb::Deck { + pb::decks::Deck { id: d.id.0, name: d.name.human_name(), mtime_secs: d.mtime_secs.0, @@ -253,10 +276,10 @@ impl From for pb::Deck { } } -impl TryFrom for Deck { +impl TryFrom for Deck { type Error = AnkiError; - fn try_from(d: pb::Deck) -> Result { + fn try_from(d: pb::decks::Deck) -> Result { Ok(Deck { id: DeckId(d.id), name: NativeDeckName::from_human_name(&d.name), @@ -268,42 +291,42 @@ impl TryFrom for Deck { } } -impl From for pb::deck::Kind { +impl From for pb::decks::deck::Kind { fn from(k: DeckKind) -> Self { match k { - DeckKind::Normal(n) => pb::deck::Kind::Normal(n), - DeckKind::Filtered(f) => pb::deck::Kind::Filtered(f), + DeckKind::Normal(n) => pb::decks::deck::Kind::Normal(n), + DeckKind::Filtered(f) => pb::decks::deck::Kind::Filtered(f), } } } -impl From for DeckKind { - fn from(kind: pb::deck::Kind) -> Self { +impl From for DeckKind { + fn from(kind: pb::decks::deck::Kind) -> Self { match kind { - pb::deck::Kind::Normal(normal) => DeckKind::Normal(normal), - pb::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered), + pb::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal), + pb::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered), } } } -impl From<(DeckId, String)> for pb::DeckNameId { +impl From<(DeckId, String)> for pb::decks::DeckNameId { fn from(id_name: (DeckId, String)) -> Self { - pb::DeckNameId { + pb::decks::DeckNameId { id: id_name.0 .0, name: id_name.1, } } } -impl From> for pb::DeckNames { +impl From> for pb::decks::DeckNames { fn from(id_names: Vec<(DeckId, String)>) -> Self { - pb::DeckNames { + pb::decks::DeckNames { entries: id_names.into_iter().map(Into::into).collect(), } } } -// fn new_deck(&self, input: pb::Bool) -> Result { +// fn new_deck(&self, input: pb::generic::Bool) -> Result { // let deck = if input.val { // Deck::new_filtered() // } else { diff --git a/rslib/src/backend/error.rs b/rslib/src/backend/error.rs index 41dc676c1..052091657 100644 --- a/rslib/src/backend/error.rs +++ b/rslib/src/backend/error.rs @@ -4,12 +4,12 @@ use crate::{ error::{AnkiError, SyncErrorKind}, pb, - pb::backend_error::Kind, + pb::backend::backend_error::Kind, prelude::*, }; impl AnkiError { - pub fn into_protobuf(self, tr: &I18n) -> pb::BackendError { + pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError { let message = self.message(tr); let help_page = self.help_page().map(|page| page as i32); let context = self.context(); @@ -43,7 +43,7 @@ impl AnkiError { AnkiError::InvalidId => Kind::InvalidInput, }; - pb::BackendError { + pb::backend::BackendError { kind: kind as i32, message, help_page, diff --git a/rslib/src/backend/generic.rs b/rslib/src/backend/generic.rs index 85c0c7c0b..6d272618d 100644 --- a/rslib/src/backend/generic.rs +++ b/rslib/src/backend/generic.rs @@ -3,98 +3,98 @@ use crate::{pb, prelude::*}; -impl From> for pb::Json { +impl From> for pb::generic::Json { fn from(json: Vec) -> Self { - pb::Json { json } + pb::generic::Json { json } } } -impl From for pb::String { +impl From for pb::generic::String { fn 
from(val: String) -> Self { - pb::String { val } + pb::generic::String { val } } } -impl From for pb::Bool { +impl From for pb::generic::Bool { fn from(val: bool) -> Self { - pb::Bool { val } + pb::generic::Bool { val } } } -impl From for pb::Int32 { +impl From for pb::generic::Int32 { fn from(val: i32) -> Self { - pb::Int32 { val } + pb::generic::Int32 { val } } } -impl From for pb::Int64 { +impl From for pb::generic::Int64 { fn from(val: i64) -> Self { - pb::Int64 { val } + pb::generic::Int64 { val } } } -impl From for pb::UInt32 { +impl From for pb::generic::UInt32 { fn from(val: u32) -> Self { - pb::UInt32 { val } + pb::generic::UInt32 { val } } } -impl From for pb::UInt32 { +impl From for pb::generic::UInt32 { fn from(val: usize) -> Self { - pb::UInt32 { val: val as u32 } + pb::generic::UInt32 { val: val as u32 } } } -impl From<()> for pb::Empty { +impl From<()> for pb::generic::Empty { fn from(_val: ()) -> Self { - pb::Empty {} + pb::generic::Empty {} } } -impl From for CardId { - fn from(cid: pb::CardId) -> Self { +impl From for CardId { + fn from(cid: pb::cards::CardId) -> Self { CardId(cid.cid) } } -impl From for Vec { - fn from(c: pb::CardIds) -> Self { +impl From for Vec { + fn from(c: pb::cards::CardIds) -> Self { c.cids.into_iter().map(CardId).collect() } } -impl From for NoteId { - fn from(nid: pb::NoteId) -> Self { +impl From for NoteId { + fn from(nid: pb::notes::NoteId) -> Self { NoteId(nid.nid) } } -impl From for pb::NoteId { +impl From for pb::notes::NoteId { fn from(nid: NoteId) -> Self { - pb::NoteId { nid: nid.0 } + pb::notes::NoteId { nid: nid.0 } } } -impl From for NotetypeId { - fn from(ntid: pb::NotetypeId) -> Self { +impl From for NotetypeId { + fn from(ntid: pb::notetypes::NotetypeId) -> Self { NotetypeId(ntid.ntid) } } -impl From for pb::NotetypeId { +impl From for pb::notetypes::NotetypeId { fn from(ntid: NotetypeId) -> Self { - pb::NotetypeId { ntid: ntid.0 } + pb::notetypes::NotetypeId { ntid: ntid.0 } } } -impl From for DeckConfigId { - fn from(dcid: pb::DeckConfigId) -> Self { +impl From for DeckConfigId { + fn from(dcid: pb::deckconfig::DeckConfigId) -> Self { DeckConfigId(dcid.dcid) } } -impl From> for pb::StringList { +impl From> for pb::generic::StringList { fn from(vals: Vec) -> Self { - pb::StringList { vals } + pb::generic::StringList { vals } } } diff --git a/rslib/src/backend/i18n.rs b/rslib/src/backend/i18n.rs index a200766bf..25261cb0e 100644 --- a/rslib/src/backend/i18n.rs +++ b/rslib/src/backend/i18n.rs @@ -6,7 +6,7 @@ use std::collections::HashMap; use fluent::{FluentArgs, FluentValue}; use super::Backend; -pub(super) use crate::pb::i18n_service::Service as I18nService; +pub(super) use crate::pb::i18n::i18n_service::Service as I18nService; use crate::{ pb, prelude::*, @@ -14,7 +14,10 @@ use crate::{ }; impl I18nService for Backend { - fn translate_string(&self, input: pb::TranslateStringRequest) -> Result { + fn translate_string( + &self, + input: pb::i18n::TranslateStringRequest, + ) -> Result { let args = build_fluent_args(input.args); Ok(self @@ -27,8 +30,11 @@ impl I18nService for Backend { .into()) } - fn format_timespan(&self, input: pb::FormatTimespanRequest) -> Result { - use pb::format_timespan_request::Context; + fn format_timespan( + &self, + input: pb::i18n::FormatTimespanRequest, + ) -> Result { + use pb::i18n::format_timespan_request::Context; Ok(match input.context() { Context::Precise => time_span(input.seconds, &self.tr, true), Context::Intervals => time_span(input.seconds, &self.tr, false), @@ -37,14 +43,14 @@ impl 
I18nService for Backend { .into()) } - fn i18n_resources(&self, input: pb::I18nResourcesRequest) -> Result { + fn i18n_resources(&self, input: pb::i18n::I18nResourcesRequest) -> Result { serde_json::to_vec(&self.tr.resources_for_js(&input.modules)) .map(Into::into) .map_err(Into::into) } } -fn build_fluent_args(input: HashMap) -> FluentArgs<'static> { +fn build_fluent_args(input: HashMap) -> FluentArgs<'static> { let mut args = FluentArgs::new(); for (key, val) in input { args.set(key, translate_arg_to_fluent_val(&val)); @@ -52,8 +58,8 @@ fn build_fluent_args(input: HashMap) -> FluentArg args } -fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue<'static> { - use pb::translate_arg_value::Value as V; +fn translate_arg_to_fluent_val(arg: &pb::i18n::TranslateArgValue) -> FluentValue<'static> { + use pb::i18n::translate_arg_value::Value as V; match &arg.value { Some(val) => match val { V::Str(s) => FluentValue::String(s.to_owned().into()), diff --git a/rslib/src/backend/import_export.rs b/rslib/src/backend/import_export.rs index f8741c431..8bf829126 100644 --- a/rslib/src/backend/import_export.rs +++ b/rslib/src/backend/import_export.rs @@ -4,10 +4,13 @@ use std::path::Path; use super::{progress::Progress, Backend}; -pub(super) use crate::pb::importexport_service::Service as ImportExportService; +pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService; use crate::{ import_export::{package::import_colpkg, ExportProgress, ImportProgress, NoteLog}, - pb::{self as pb, export_limit, ExportLimit}, + pb::{ + import_export::{export_limit, ExportLimit}, + {self as pb}, + }, prelude::*, search::SearchNode, }; @@ -15,8 +18,8 @@ use crate::{ impl ImportExportService for Backend { fn export_collection_package( &self, - input: pb::ExportCollectionPackageRequest, - ) -> Result { + input: pb::import_export::ExportCollectionPackageRequest, + ) -> Result { self.abort_media_sync_and_wait(); let mut guard = self.lock_open_collection()?; @@ -34,8 +37,8 @@ impl ImportExportService for Backend { fn import_collection_package( &self, - input: pb::ImportCollectionPackageRequest, - ) -> Result { + input: pb::import_export::ImportCollectionPackageRequest, + ) -> Result { let _guard = self.lock_closed_collection()?; import_colpkg( @@ -51,13 +54,16 @@ impl ImportExportService for Backend { fn import_anki_package( &self, - input: pb::ImportAnkiPackageRequest, - ) -> Result { + input: pb::import_export::ImportAnkiPackageRequest, + ) -> Result { self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn())) .map(Into::into) } - fn export_anki_package(&self, input: pb::ExportAnkiPackageRequest) -> Result { + fn export_anki_package( + &self, + input: pb::import_export::ExportAnkiPackageRequest, + ) -> Result { self.with_col(|col| { col.export_apkg( &input.out_path, @@ -72,7 +78,10 @@ impl ImportExportService for Backend { .map(Into::into) } - fn get_csv_metadata(&self, input: pb::CsvMetadataRequest) -> Result { + fn get_csv_metadata( + &self, + input: pb::import_export::CsvMetadataRequest, + ) -> Result { let delimiter = input.delimiter.is_some().then(|| input.delimiter()); self.with_col(|col| { col.get_csv_metadata( @@ -84,7 +93,10 @@ impl ImportExportService for Backend { }) } - fn import_csv(&self, input: pb::ImportCsvRequest) -> Result { + fn import_csv( + &self, + input: pb::import_export::ImportCsvRequest, + ) -> Result { self.with_col(|col| { col.import_csv( &input.path, @@ -95,12 +107,18 @@ impl ImportExportService for Backend { 
.map(Into::into) } - fn export_note_csv(&self, input: pb::ExportNoteCsvRequest) -> Result { + fn export_note_csv( + &self, + input: pb::import_export::ExportNoteCsvRequest, + ) -> Result { self.with_col(|col| col.export_note_csv(input, self.export_progress_fn())) .map(Into::into) } - fn export_card_csv(&self, input: pb::ExportCardCsvRequest) -> Result { + fn export_card_csv( + &self, + input: pb::import_export::ExportCardCsvRequest, + ) -> Result { self.with_col(|col| { col.export_card_csv( &input.out_path, @@ -112,12 +130,18 @@ impl ImportExportService for Backend { .map(Into::into) } - fn import_json_file(&self, input: pb::String) -> Result { + fn import_json_file( + &self, + input: pb::generic::String, + ) -> Result { self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn())) .map(Into::into) } - fn import_json_string(&self, input: pb::String) -> Result { + fn import_json_string( + &self, + input: pb::generic::String, + ) -> Result { self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn())) .map(Into::into) } @@ -135,7 +159,7 @@ impl Backend { } } -impl From> for pb::ImportResponse { +impl From> for pb::import_export::ImportResponse { fn from(output: OpOutput) -> Self { Self { changes: Some(output.changes.into()), @@ -149,7 +173,7 @@ impl From for SearchNode { use export_limit::Limit; let limit = export_limit .limit - .unwrap_or(Limit::WholeCollection(pb::Empty {})); + .unwrap_or(Limit::WholeCollection(pb::generic::Empty {})); match limit { Limit::WholeCollection(_) => Self::WholeCollection, Limit::DeckId(did) => Self::from_deck_id(did, true), diff --git a/rslib/src/backend/links.rs b/rslib/src/backend/links.rs index 469b89b07..59661cb6f 100644 --- a/rslib/src/backend/links.rs +++ b/rslib/src/backend/links.rs @@ -2,11 +2,11 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::Backend; -pub(super) use crate::pb::links_service::Service as LinksService; +pub(super) use crate::pb::links::links_service::Service as LinksService; use crate::{pb, pb::links::help_page_link_request::HelpPage, prelude::*}; impl LinksService for Backend { - fn help_page_link(&self, input: pb::HelpPageLinkRequest) -> Result { + fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result { Ok(HelpPage::from_i32(input.page) .unwrap_or(HelpPage::Index) .to_link() diff --git a/rslib/src/backend/media.rs b/rslib/src/backend/media.rs index dda0a2f5e..e241e293c 100644 --- a/rslib/src/backend/media.rs +++ b/rslib/src/backend/media.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::{progress::Progress, Backend}; -pub(super) use crate::pb::media_service::Service as MediaService; +pub(super) use crate::pb::media::media_service::Service as MediaService; use crate::{ media::{check::MediaChecker, MediaManager}, pb, @@ -13,7 +13,7 @@ impl MediaService for Backend { // media //----------------------------------------------- - fn check_media(&self, _input: pb::Empty) -> Result { + fn check_media(&self, _input: pb::generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); @@ -26,7 +26,7 @@ impl MediaService for Backend { let mut report = checker.summarize_output(&mut output); ctx.report_media_field_referencing_templates(&mut report)?; - Ok(pb::CheckMediaResponse { + Ok(pb::media::CheckMediaResponse { unused: output.unused, missing: output.missing, report, @@ -36,7 
+36,10 @@ impl MediaService for Backend { }) } - fn trash_media_files(&self, input: pb::TrashMediaFilesRequest) -> Result { + fn trash_media_files( + &self, + input: pb::media::TrashMediaFilesRequest, + ) -> Result { self.with_col(|col| { let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mut ctx = mgr.dbctx(); @@ -45,7 +48,7 @@ impl MediaService for Backend { .map(Into::into) } - fn add_media_file(&self, input: pb::AddMediaFileRequest) -> Result { + fn add_media_file(&self, input: pb::media::AddMediaFileRequest) -> Result { self.with_col(|col| { let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mut ctx = mgr.dbctx(); @@ -56,7 +59,7 @@ impl MediaService for Backend { }) } - fn empty_trash(&self, _input: pb::Empty) -> Result { + fn empty_trash(&self, _input: pb::generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); @@ -69,7 +72,7 @@ impl MediaService for Backend { .map(Into::into) } - fn restore_trash(&self, _input: pb::Empty) -> Result { + fn restore_trash(&self, _input: pb::generic::Empty) -> Result { let mut handler = self.new_progress_handler(); let progress_fn = move |progress| handler.update(Progress::MediaCheck(progress as u32), true); diff --git a/rslib/src/backend/mod.rs b/rslib/src/backend/mod.rs index a3cffbb05..cd3b2ff70 100644 --- a/rslib/src/backend/mod.rs +++ b/rslib/src/backend/mod.rs @@ -38,7 +38,9 @@ use once_cell::sync::OnceCell; use progress::AbortHandleSlot; use prost::Message; use slog::Logger; -use tokio::runtime::{self, Runtime}; +use tokio::runtime::{ + Runtime, {self}, +}; use self::{ card::CardsService, @@ -60,7 +62,7 @@ use self::{ sync::{SyncService, SyncState}, tags::TagsService, }; -use crate::{backend::dbproxy::db_command_bytes, log, pb, prelude::*}; +use crate::{backend::dbproxy::db_command_bytes, log, pb, pb::backend::ServiceIndex, prelude::*}; pub struct Backend { col: Arc>>, @@ -79,8 +81,8 @@ struct BackendState { sync: SyncState, } -pub fn init_backend(init_msg: &[u8], log: Option) -> std::result::Result { - let input: pb::BackendInit = match pb::BackendInit::decode(init_msg) { +pub fn init_backend(init_msg: &[u8], log: Option) -> result::Result { + let input: pb::backend::BackendInit = match pb::backend::BackendInit::decode(init_msg) { Ok(req) => req, Err(_) => return Err("couldn't decode init request".into()), }; @@ -119,30 +121,28 @@ impl Backend { method: u32, input: &[u8], ) -> result::Result, Vec> { - pb::ServiceIndex::from_i32(service as i32) + ServiceIndex::from_i32(service as i32) .or_invalid("invalid service") .and_then(|service| match service { - pb::ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input), - pb::ServiceIndex::Decks => DecksService::run_method(self, method, input), - pb::ServiceIndex::Notes => NotesService::run_method(self, method, input), - pb::ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input), - pb::ServiceIndex::Config => ConfigService::run_method(self, method, input), - pb::ServiceIndex::Sync => SyncService::run_method(self, method, input), - pb::ServiceIndex::Tags => TagsService::run_method(self, method, input), - pb::ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input), - pb::ServiceIndex::CardRendering => { + ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input), + ServiceIndex::Decks => DecksService::run_method(self, method, input), + ServiceIndex::Notes => 
NotesService::run_method(self, method, input), + ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input), + ServiceIndex::Config => ConfigService::run_method(self, method, input), + ServiceIndex::Sync => SyncService::run_method(self, method, input), + ServiceIndex::Tags => TagsService::run_method(self, method, input), + ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input), + ServiceIndex::CardRendering => { CardRenderingService::run_method(self, method, input) } - pb::ServiceIndex::Media => MediaService::run_method(self, method, input), - pb::ServiceIndex::Stats => StatsService::run_method(self, method, input), - pb::ServiceIndex::Search => SearchService::run_method(self, method, input), - pb::ServiceIndex::I18n => I18nService::run_method(self, method, input), - pb::ServiceIndex::Links => LinksService::run_method(self, method, input), - pb::ServiceIndex::Collection => CollectionService::run_method(self, method, input), - pb::ServiceIndex::Cards => CardsService::run_method(self, method, input), - pb::ServiceIndex::ImportExport => { - ImportExportService::run_method(self, method, input) - } + ServiceIndex::Media => MediaService::run_method(self, method, input), + ServiceIndex::Stats => StatsService::run_method(self, method, input), + ServiceIndex::Search => SearchService::run_method(self, method, input), + ServiceIndex::I18n => I18nService::run_method(self, method, input), + ServiceIndex::Links => LinksService::run_method(self, method, input), + ServiceIndex::Collection => CollectionService::run_method(self, method, input), + ServiceIndex::Cards => CardsService::run_method(self, method, input), + ServiceIndex::ImportExport => ImportExportService::run_method(self, method, input), }) .map_err(|err| { let backend_err = err.into_protobuf(&self.tr); @@ -152,7 +152,7 @@ impl Backend { }) } - pub fn run_db_command_bytes(&self, input: &[u8]) -> std::result::Result, Vec> { + pub fn run_db_command_bytes(&self, input: &[u8]) -> result::Result, Vec> { self.db_command(input).map_err(|err| { let backend_err = err.into_protobuf(&self.tr); let mut bytes = Vec::new(); diff --git a/rslib/src/backend/notes.rs b/rslib/src/backend/notes.rs index e8c51c118..62dfa8935 100644 --- a/rslib/src/backend/notes.rs +++ b/rslib/src/backend/notes.rs @@ -4,7 +4,7 @@ use std::collections::HashSet; use super::Backend; -pub(super) use crate::pb::notes_service::Service as NotesService; +pub(super) use crate::pb::notes::notes_service::Service as NotesService; use crate::{ cloze::add_cloze_numbers_in_string, pb::{self as pb}, @@ -12,7 +12,7 @@ use crate::{ }; impl NotesService for Backend { - fn new_note(&self, input: pb::NotetypeId) -> Result { + fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result { let ntid = input.into(); self.with_col(|col| { let nt = col.get_notetype(ntid)?.or_not_found(ntid)?; @@ -20,11 +20,11 @@ impl NotesService for Backend { }) } - fn add_note(&self, input: pb::AddNoteRequest) -> Result { + fn add_note(&self, input: pb::notes::AddNoteRequest) -> Result { self.with_col(|col| { let mut note: Note = input.note.or_invalid("no note provided")?.into(); let changes = col.add_note(&mut note, DeckId(input.deck_id))?; - Ok(pb::AddNoteResponse { + Ok(pb::notes::AddNoteResponse { note_id: note.id.0, changes: Some(changes.into()), }) @@ -33,15 +33,18 @@ impl NotesService for Backend { fn defaults_for_adding( &self, - input: pb::DefaultsForAddingRequest, - ) -> Result { + input: pb::notes::DefaultsForAddingRequest, + ) -> Result { self.with_col(|col| { let 
home_deck: DeckId = input.home_deck_of_current_review_card.into(); col.defaults_for_adding(home_deck).map(Into::into) }) } - fn default_deck_for_notetype(&self, input: pb::NotetypeId) -> Result { + fn default_deck_for_notetype( + &self, + input: pb::notetypes::NotetypeId, + ) -> Result { self.with_col(|col| { Ok(col .default_deck_for_notetype(input.into())? @@ -50,7 +53,10 @@ impl NotesService for Backend { }) } - fn update_notes(&self, input: pb::UpdateNotesRequest) -> Result { + fn update_notes( + &self, + input: pb::notes::UpdateNotesRequest, + ) -> Result { self.with_col(|col| { let notes = input .notes @@ -62,12 +68,15 @@ impl NotesService for Backend { .map(Into::into) } - fn get_note(&self, input: pb::NoteId) -> Result { + fn get_note(&self, input: pb::notes::NoteId) -> Result { let nid = input.into(); self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into)) } - fn remove_notes(&self, input: pb::RemoveNotesRequest) -> Result { + fn remove_notes( + &self, + input: pb::notes::RemoveNotesRequest, + ) -> Result { self.with_col(|col| { if !input.note_ids.is_empty() { col.remove_notes( @@ -91,20 +100,23 @@ impl NotesService for Backend { }) } - fn cloze_numbers_in_note(&self, note: pb::Note) -> Result { + fn cloze_numbers_in_note( + &self, + note: pb::notes::Note, + ) -> Result { let mut set = HashSet::with_capacity(4); for field in ¬e.fields { add_cloze_numbers_in_string(field, &mut set); } - Ok(pb::ClozeNumbersInNoteResponse { + Ok(pb::notes::ClozeNumbersInNoteResponse { numbers: set.into_iter().map(|n| n as u32).collect(), }) } fn after_note_updates( &self, - input: pb::AfterNoteUpdatesRequest, - ) -> Result { + input: pb::notes::AfterNoteUpdatesRequest, + ) -> Result { self.with_col(|col| { col.after_note_updates( &to_note_ids(input.nids), @@ -117,35 +129,41 @@ impl NotesService for Backend { fn field_names_for_notes( &self, - input: pb::FieldNamesForNotesRequest, - ) -> Result { + input: pb::notes::FieldNamesForNotesRequest, + ) -> Result { self.with_col(|col| { let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect(); col.storage .field_names_for_notes(&nids) - .map(|fields| pb::FieldNamesForNotesResponse { fields }) + .map(|fields| pb::notes::FieldNamesForNotesResponse { fields }) }) } - fn note_fields_check(&self, input: pb::Note) -> Result { + fn note_fields_check( + &self, + input: pb::notes::Note, + ) -> Result { let note: Note = input.into(); self.with_col(|col| { col.note_fields_check(¬e) - .map(|r| pb::NoteFieldsCheckResponse { state: r as i32 }) + .map(|r| pb::notes::NoteFieldsCheckResponse { state: r as i32 }) }) } - fn cards_of_note(&self, input: pb::NoteId) -> Result { + fn cards_of_note(&self, input: pb::notes::NoteId) -> Result { self.with_col(|col| { col.storage .all_card_ids_of_note_in_template_order(NoteId(input.nid)) - .map(|v| pb::CardIds { + .map(|v| pb::cards::CardIds { cids: v.into_iter().map(Into::into).collect(), }) }) } - fn get_single_notetype_of_notes(&self, input: pb::NoteIds) -> Result { + fn get_single_notetype_of_notes( + &self, + input: pb::notes::NoteIds, + ) -> Result { self.with_col(|col| { col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId)) .map(Into::into) diff --git a/rslib/src/backend/notetypes.rs b/rslib/src/backend/notetypes.rs index c2d72ba41..6fab603e8 100644 --- a/rslib/src/backend/notetypes.rs +++ b/rslib/src/backend/notetypes.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::Backend; -pub(super) use 
crate::pb::notetypes_service::Service as NotetypesService; +pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService; use crate::{ config::get_aux_notetype_config_key, notetype::{ @@ -13,7 +13,10 @@ use crate::{ }; impl NotetypesService for Backend { - fn add_notetype(&self, input: pb::Notetype) -> Result { + fn add_notetype( + &self, + input: pb::notetypes::Notetype, + ) -> Result { let mut notetype: Notetype = input.into(); self.with_col(|col| { Ok(col @@ -23,13 +26,16 @@ impl NotetypesService for Backend { }) } - fn update_notetype(&self, input: pb::Notetype) -> Result { + fn update_notetype(&self, input: pb::notetypes::Notetype) -> Result { let mut notetype: Notetype = input.into(); self.with_col(|col| col.update_notetype(&mut notetype, false)) .map(Into::into) } - fn add_notetype_legacy(&self, input: pb::Json) -> Result { + fn add_notetype_legacy( + &self, + input: pb::generic::Json, + ) -> Result { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); self.with_col(|col| { @@ -40,7 +46,10 @@ impl NotetypesService for Backend { }) } - fn update_notetype_legacy(&self, input: pb::Json) -> Result { + fn update_notetype_legacy( + &self, + input: pb::generic::Json, + ) -> Result { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); self.with_col(|col| col.update_notetype(&mut notetype, false)) @@ -49,8 +58,8 @@ impl NotetypesService for Backend { fn add_or_update_notetype( &self, - input: pb::AddOrUpdateNotetypeRequest, - ) -> Result { + input: pb::notetypes::AddOrUpdateNotetypeRequest, + ) -> Result { self.with_col(|col| { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut nt: Notetype = legacy.into(); @@ -64,11 +73,14 @@ impl NotetypesService for Backend { } else { col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?; } - Ok(pb::NotetypeId { ntid: nt.id.0 }) + Ok(pb::notetypes::NotetypeId { ntid: nt.id.0 }) }) } - fn get_stock_notetype_legacy(&self, input: pb::StockNotetype) -> Result { + fn get_stock_notetype_legacy( + &self, + input: pb::notetypes::StockNotetype, + ) -> Result { // fixme: use individual functions instead of full vec let mut all = all_stock_notetypes(&self.tr); let idx = (input.kind as usize).min(all.len() - 1); @@ -79,7 +91,7 @@ impl NotetypesService for Backend { .map(Into::into) } - fn get_notetype(&self, input: pb::NotetypeId) -> Result { + fn get_notetype(&self, input: pb::notetypes::NotetypeId) -> Result { let ntid = input.into(); self.with_col(|col| { col.storage @@ -89,7 +101,7 @@ impl NotetypesService for Backend { }) } - fn get_notetype_legacy(&self, input: pb::NotetypeId) -> Result { + fn get_notetype_legacy(&self, input: pb::notetypes::NotetypeId) -> Result { let ntid = input.into(); self.with_col(|col| { let schema11: NotetypeSchema11 = @@ -98,56 +110,73 @@ impl NotetypesService for Backend { }) } - fn get_notetype_names(&self, _input: pb::Empty) -> Result { + fn get_notetype_names( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| { let entries: Vec<_> = col .storage .get_all_notetype_names()? 
.into_iter() - .map(|(id, name)| pb::NotetypeNameId { id: id.0, name }) + .map(|(id, name)| pb::notetypes::NotetypeNameId { id: id.0, name }) .collect(); - Ok(pb::NotetypeNames { entries }) + Ok(pb::notetypes::NotetypeNames { entries }) }) } - fn get_notetype_names_and_counts(&self, _input: pb::Empty) -> Result { + fn get_notetype_names_and_counts( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| { let entries: Vec<_> = col .storage .get_notetype_use_counts()? .into_iter() - .map(|(id, name, use_count)| pb::NotetypeNameIdUseCount { - id: id.0, - name, - use_count, - }) + .map( + |(id, name, use_count)| pb::notetypes::NotetypeNameIdUseCount { + id: id.0, + name, + use_count, + }, + ) .collect(); - Ok(pb::NotetypeUseCounts { entries }) + Ok(pb::notetypes::NotetypeUseCounts { entries }) }) } - fn get_notetype_id_by_name(&self, input: pb::String) -> Result { + fn get_notetype_id_by_name( + &self, + input: pb::generic::String, + ) -> Result { self.with_col(|col| { col.storage .get_notetype_id(&input.val) .and_then(|nt| nt.or_not_found(input.val)) - .map(|ntid| pb::NotetypeId { ntid: ntid.0 }) + .map(|ntid| pb::notetypes::NotetypeId { ntid: ntid.0 }) }) } - fn remove_notetype(&self, input: pb::NotetypeId) -> Result { + fn remove_notetype( + &self, + input: pb::notetypes::NotetypeId, + ) -> Result { self.with_col(|col| col.remove_notetype(input.into())) .map(Into::into) } - fn get_aux_notetype_config_key(&self, input: pb::GetAuxConfigKeyRequest) -> Result { + fn get_aux_notetype_config_key( + &self, + input: pb::notetypes::GetAuxConfigKeyRequest, + ) -> Result { Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into()) } fn get_aux_template_config_key( &self, - input: pb::GetAuxTemplateConfigKeyRequest, - ) -> Result { + input: pb::notetypes::GetAuxTemplateConfigKeyRequest, + ) -> Result { self.with_col(|col| { col.get_aux_template_config_key( input.notetype_id.into(), @@ -160,26 +189,29 @@ impl NotetypesService for Backend { fn get_change_notetype_info( &self, - input: pb::GetChangeNotetypeInfoRequest, - ) -> Result { + input: pb::notetypes::GetChangeNotetypeInfoRequest, + ) -> Result { self.with_col(|col| { col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into()) .map(Into::into) }) } - fn change_notetype(&self, input: pb::ChangeNotetypeRequest) -> Result { + fn change_notetype( + &self, + input: pb::notetypes::ChangeNotetypeRequest, + ) -> Result { self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into)) } - fn get_field_names(&self, input: pb::NotetypeId) -> Result { + fn get_field_names(&self, input: pb::notetypes::NotetypeId) -> Result { self.with_col(|col| col.storage.get_field_names(input.into())) .map(Into::into) } } -impl From for Notetype { - fn from(n: pb::Notetype) -> Self { +impl From for Notetype { + fn from(n: pb::notetypes::Notetype) -> Self { Notetype { id: n.id.into(), name: n.name, @@ -192,9 +224,9 @@ impl From for Notetype { } } -impl From for pb::ChangeNotetypeInfo { +impl From for pb::notetypes::ChangeNotetypeInfo { fn from(i: NotetypeChangeInfo) -> Self { - pb::ChangeNotetypeInfo { + pb::notetypes::ChangeNotetypeInfo { old_notetype_name: i.old_notetype_name, old_field_names: i.old_field_names, old_template_names: i.old_template_names, @@ -205,8 +237,8 @@ impl From for pb::ChangeNotetypeInfo { } } -impl From for ChangeNotetypeInput { - fn from(i: pb::ChangeNotetypeRequest) -> Self { +impl From for ChangeNotetypeInput { + fn from(i: pb::notetypes::ChangeNotetypeRequest) -> Self { 
ChangeNotetypeInput { current_schema: i.current_schema.into(), note_ids: i.note_ids.into_newtype(NoteId), @@ -234,9 +266,9 @@ impl From for ChangeNotetypeInput { } } -impl From for pb::ChangeNotetypeRequest { +impl From for pb::notetypes::ChangeNotetypeRequest { fn from(i: ChangeNotetypeInput) -> Self { - pb::ChangeNotetypeRequest { + pb::notetypes::ChangeNotetypeRequest { current_schema: i.current_schema.into(), note_ids: i.note_ids.into_iter().map(Into::into).collect(), old_notetype_name: i.old_notetype_name, diff --git a/rslib/src/backend/ops.rs b/rslib/src/backend/ops.rs index 9e303d9c3..c08875ae9 100644 --- a/rslib/src/backend/ops.rs +++ b/rslib/src/backend/ops.rs @@ -8,9 +8,9 @@ use crate::{ undo::{UndoOutput, UndoStatus}, }; -impl From for pb::OpChanges { +impl From for pb::collection::OpChanges { fn from(c: OpChanges) -> Self { - pb::OpChanges { + pb::collection::OpChanges { card: c.changes.card, note: c.changes.note, deck: c.changes.deck, @@ -28,8 +28,8 @@ impl From for pb::OpChanges { } impl UndoStatus { - pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::UndoStatus { - pb::UndoStatus { + pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::UndoStatus { + pb::collection::UndoStatus { undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(), redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(), last_step: self.last_step as u32, @@ -37,24 +37,24 @@ impl UndoStatus { } } -impl From> for pb::OpChanges { +impl From> for pb::collection::OpChanges { fn from(o: OpOutput<()>) -> Self { o.changes.into() } } -impl From> for pb::OpChangesWithCount { +impl From> for pb::collection::OpChangesWithCount { fn from(out: OpOutput) -> Self { - pb::OpChangesWithCount { + pb::collection::OpChangesWithCount { count: out.output as u32, changes: Some(out.changes.into()), } } } -impl From> for pb::OpChangesWithId { +impl From> for pb::collection::OpChangesWithId { fn from(out: OpOutput) -> Self { - pb::OpChangesWithId { + pb::collection::OpChangesWithId { id: out.output, changes: Some(out.changes.into()), } @@ -62,8 +62,8 @@ impl From> for pb::OpChangesWithId { } impl OpOutput { - pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::OpChangesAfterUndo { - pb::OpChangesAfterUndo { + pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::OpChangesAfterUndo { + pb::collection::OpChangesAfterUndo { changes: Some(self.changes.into()), operation: self.output.undone_op.describe(tr), reverted_to_timestamp: self.output.reverted_to.0, diff --git a/rslib/src/backend/progress.rs b/rslib/src/backend/progress.rs index f9247afaa..c69ca4ac9 100644 --- a/rslib/src/backend/progress.rs +++ b/rslib/src/backend/progress.rs @@ -55,17 +55,21 @@ pub(super) enum Progress { Export(ExportProgress), } -pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::Progress { +pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::collection::Progress { let progress = if let Some(progress) = progress { match progress { - Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, tr)), - Progress::MediaCheck(n) => { - pb::progress::Value::MediaCheck(tr.media_check_checked(n).into()) + Progress::MediaSync(p) => { + pb::collection::progress::Value::MediaSync(media_sync_progress(p, tr)) + } + Progress::MediaCheck(n) => { + pb::collection::progress::Value::MediaCheck(tr.media_check_checked(n).into()) + } + Progress::FullSync(p) => { + pb::collection::progress::Value::FullSync(pb::collection::progress::FullSync { + transferred: p.transferred_bytes as u32, + total: 
p.total_bytes as u32, + }) } - Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync { - transferred: p.transferred_bytes as u32, - total: p.total_bytes as u32, - }), Progress::NormalSync(p) => { let stage = match p.stage { SyncStage::Connecting => tr.sync_syncing(), @@ -79,7 +83,7 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::Pr let removed = tr .sync_media_removed_count(p.local_remove, p.remote_remove) .into(); - pb::progress::Value::NormalSync(pb::progress::NormalSync { + pb::collection::progress::Value::NormalSync(pb::collection::progress::NormalSync { stage, added, removed, @@ -100,13 +104,15 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::Pr DatabaseCheckProgress::History => tr.database_check_checking_history(), } .to_string(); - pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck { - stage, - stage_total, - stage_current, - }) + pb::collection::progress::Value::DatabaseCheck( + pb::collection::progress::DatabaseCheck { + stage, + stage_total, + stage_current, + }, + ) } - Progress::Import(progress) => pb::progress::Value::Importing( + Progress::Import(progress) => pb::collection::progress::Value::Importing( match progress { ImportProgress::File => tr.importing_importing_file(), ImportProgress::Media(n) => tr.importing_processed_media_file(n), @@ -117,7 +123,7 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::Pr } .into(), ), - Progress::Export(progress) => pb::progress::Value::Exporting( + Progress::Export(progress) => pb::collection::progress::Value::Exporting( match progress { ExportProgress::File => tr.exporting_exporting_file(), ExportProgress::Media(n) => tr.exporting_processed_media_files(n), @@ -129,15 +135,15 @@ pub(super) fn progress_to_proto(progress: Option, tr: &I18n) -> pb::Pr ), } } else { - pb::progress::Value::None(pb::Empty {}) + pb::collection::progress::Value::None(pb::generic::Empty {}) }; - pb::Progress { + pb::collection::Progress { value: Some(progress), } } -fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::progress::MediaSync { - pb::progress::MediaSync { +fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::collection::progress::MediaSync { + pb::collection::progress::MediaSync { checked: tr.sync_media_checked_count(p.checked).into(), added: tr .sync_media_added_count(p.uploaded_files, p.downloaded_files) diff --git a/rslib/src/backend/scheduler/answering.rs b/rslib/src/backend/scheduler/answering.rs index d10559fef..d6c376cd5 100644 --- a/rslib/src/backend/scheduler/answering.rs +++ b/rslib/src/backend/scheduler/answering.rs @@ -12,8 +12,8 @@ use crate::{ }, }; -impl From for CardAnswer { - fn from(mut answer: pb::CardAnswer) -> Self { +impl From for CardAnswer { + fn from(mut answer: pb::scheduler::CardAnswer) -> Self { let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default(); let custom_data = mem::take(&mut new_state.custom_data); CardAnswer { @@ -28,34 +28,38 @@ impl From for CardAnswer { } } -impl From for Rating { - fn from(rating: pb::card_answer::Rating) -> Self { +impl From for Rating { + fn from(rating: pb::scheduler::card_answer::Rating) -> Self { match rating { - pb::card_answer::Rating::Again => Rating::Again, - pb::card_answer::Rating::Hard => Rating::Hard, - pb::card_answer::Rating::Good => Rating::Good, - pb::card_answer::Rating::Easy => Rating::Easy, + pb::scheduler::card_answer::Rating::Again => Rating::Again, + pb::scheduler::card_answer::Rating::Hard => Rating::Hard, + 
pb::scheduler::card_answer::Rating::Good => Rating::Good, + pb::scheduler::card_answer::Rating::Easy => Rating::Easy, } } } -impl From for pb::queued_cards::QueuedCard { +impl From for pb::scheduler::queued_cards::QueuedCard { fn from(queued_card: QueuedCard) -> Self { Self { card: Some(queued_card.card.into()), states: Some(queued_card.states.into()), queue: match queued_card.kind { - crate::scheduler::queue::QueueEntryKind::New => pb::queued_cards::Queue::New, - crate::scheduler::queue::QueueEntryKind::Review => pb::queued_cards::Queue::Review, + crate::scheduler::queue::QueueEntryKind::New => { + pb::scheduler::queued_cards::Queue::New + } + crate::scheduler::queue::QueueEntryKind::Review => { + pb::scheduler::queued_cards::Queue::Review + } crate::scheduler::queue::QueueEntryKind::Learning => { - pb::queued_cards::Queue::Learning + pb::scheduler::queued_cards::Queue::Learning } } as i32, } } } -impl From for pb::QueuedCards { +impl From for pb::scheduler::QueuedCards { fn from(queued_cards: QueuedCards) -> Self { Self { cards: queued_cards.cards.into_iter().map(Into::into).collect(), diff --git a/rslib/src/backend/scheduler/mod.rs b/rslib/src/backend/scheduler/mod.rs index 635ab0db3..164a1574d 100644 --- a/rslib/src/backend/scheduler/mod.rs +++ b/rslib/src/backend/scheduler/mod.rs @@ -5,7 +5,7 @@ mod answering; mod states; use super::Backend; -pub(super) use crate::pb::scheduler_service::Service as SchedulerService; +pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService; use crate::{ pb, prelude::*, @@ -19,7 +19,10 @@ use crate::{ impl SchedulerService for Backend { /// This behaves like _updateCutoff() in older code - it also unburies at the start of /// a new day. - fn sched_timing_today(&self, _input: pb::Empty) -> Result { + fn sched_timing_today( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| { let timing = col.timing_today()?; col.unbury_if_day_rolled_over(timing)?; @@ -28,16 +31,19 @@ impl SchedulerService for Backend { } /// Fetch data from DB and return rendered string. - fn studied_today(&self, _input: pb::Empty) -> Result { + fn studied_today(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.studied_today().map(Into::into)) } /// Message rendering only, for old graphs. 
- fn studied_today_message(&self, input: pb::StudiedTodayMessageRequest) -> Result { + fn studied_today_message( + &self, + input: pb::scheduler::StudiedTodayMessageRequest, + ) -> Result { Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into()) } - fn update_stats(&self, input: pb::UpdateStatsRequest) -> Result { + fn update_stats(&self, input: pb::scheduler::UpdateStatsRequest) -> Result { self.with_col(|col| { col.transact_no_undo(|col| { let today = col.current_due_day(0)?; @@ -47,7 +53,10 @@ impl SchedulerService for Backend { }) } - fn extend_limits(&self, input: pb::ExtendLimitsRequest) -> Result { + fn extend_limits( + &self, + input: pb::scheduler::ExtendLimitsRequest, + ) -> Result { self.with_col(|col| { col.transact_no_undo(|col| { let today = col.current_due_day(0)?; @@ -64,20 +73,32 @@ impl SchedulerService for Backend { }) } - fn counts_for_deck_today(&self, input: pb::DeckId) -> Result { + fn counts_for_deck_today( + &self, + input: pb::decks::DeckId, + ) -> Result { self.with_col(|col| col.counts_for_deck_today(input.did.into())) } - fn congrats_info(&self, _input: pb::Empty) -> Result { + fn congrats_info( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| col.congrats_info()) } - fn restore_buried_and_suspended_cards(&self, input: pb::CardIds) -> Result { + fn restore_buried_and_suspended_cards( + &self, + input: pb::cards::CardIds, + ) -> Result { let cids: Vec<_> = input.into(); self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into)) } - fn unbury_deck(&self, input: pb::UnburyDeckRequest) -> Result { + fn unbury_deck( + &self, + input: pb::scheduler::UnburyDeckRequest, + ) -> Result { self.with_col(|col| { col.unbury_deck(input.deck_id.into(), input.mode()) .map(Into::into) @@ -86,8 +107,8 @@ impl SchedulerService for Backend { fn bury_or_suspend_cards( &self, - input: pb::BuryOrSuspendCardsRequest, - ) -> Result { + input: pb::scheduler::BuryOrSuspendCardsRequest, + ) -> Result { self.with_col(|col| { let mode = input.mode(); let cids = if input.card_ids.is_empty() { @@ -100,15 +121,21 @@ impl SchedulerService for Backend { }) } - fn empty_filtered_deck(&self, input: pb::DeckId) -> Result { + fn empty_filtered_deck(&self, input: pb::decks::DeckId) -> Result { self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into)) } - fn rebuild_filtered_deck(&self, input: pb::DeckId) -> Result { + fn rebuild_filtered_deck( + &self, + input: pb::decks::DeckId, + ) -> Result { self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into)) } - fn schedule_cards_as_new(&self, input: pb::ScheduleCardsAsNewRequest) -> Result { + fn schedule_cards_as_new( + &self, + input: pb::scheduler::ScheduleCardsAsNewRequest, + ) -> Result { self.with_col(|col| { let cids = input.card_ids.into_newtype(CardId); col.reschedule_cards_as_new( @@ -118,7 +145,7 @@ impl SchedulerService for Backend { input.reset_counts, input .context - .and_then(pb::schedule_cards_as_new_request::Context::from_i32), + .and_then(pb::scheduler::schedule_cards_as_new_request::Context::from_i32), ) .map(Into::into) }) @@ -126,19 +153,25 @@ impl SchedulerService for Backend { fn schedule_cards_as_new_defaults( &self, - input: pb::ScheduleCardsAsNewDefaultsRequest, - ) -> Result { + input: pb::scheduler::ScheduleCardsAsNewDefaultsRequest, + ) -> Result { self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context()))) } - fn set_due_date(&self, input: pb::SetDueDateRequest) -> Result { + fn set_due_date( + &self, + input: 
pb::scheduler::SetDueDateRequest, + ) -> Result { let config = input.config_key.map(|v| v.key().into()); let days = input.days; let cids = input.card_ids.into_newtype(CardId); self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into)) } - fn sort_cards(&self, input: pb::SortCardsRequest) -> Result { + fn sort_cards( + &self, + input: pb::scheduler::SortCardsRequest, + ) -> Result { let cids = input.card_ids.into_newtype(CardId); let (start, step, random, shift) = ( input.starting_from, @@ -157,66 +190,86 @@ impl SchedulerService for Backend { }) } - fn reposition_defaults(&self, _input: pb::Empty) -> Result { + fn reposition_defaults( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.reposition_defaults())) } - fn sort_deck(&self, input: pb::SortDeckRequest) -> Result { + fn sort_deck( + &self, + input: pb::scheduler::SortDeckRequest, + ) -> Result { self.with_col(|col| { col.sort_deck_legacy(input.deck_id.into(), input.randomize) .map(Into::into) }) } - fn get_scheduling_states(&self, input: pb::CardId) -> Result { + fn get_scheduling_states( + &self, + input: pb::cards::CardId, + ) -> Result { let cid: CardId = input.into(); self.with_col(|col| col.get_scheduling_states(cid)) .map(Into::into) } - fn describe_next_states(&self, input: pb::SchedulingStates) -> Result { + fn describe_next_states( + &self, + input: pb::scheduler::SchedulingStates, + ) -> Result { let states: SchedulingStates = input.into(); self.with_col(|col| col.describe_next_states(states)) .map(Into::into) } - fn state_is_leech(&self, input: pb::SchedulingState) -> Result { + fn state_is_leech(&self, input: pb::scheduler::SchedulingState) -> Result { let state: CardState = input.into(); Ok(state.leeched().into()) } - fn answer_card(&self, input: pb::CardAnswer) -> Result { + fn answer_card(&self, input: pb::scheduler::CardAnswer) -> Result { self.with_col(|col| col.answer_card(&mut input.into())) .map(Into::into) } - fn upgrade_scheduler(&self, _input: pb::Empty) -> Result { + fn upgrade_scheduler(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler())) .map(Into::into) } - fn get_queued_cards(&self, input: pb::GetQueuedCardsRequest) -> Result { + fn get_queued_cards( + &self, + input: pb::scheduler::GetQueuedCardsRequest, + ) -> Result { self.with_col(|col| { col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only) .map(Into::into) }) } - fn custom_study(&self, input: pb::CustomStudyRequest) -> Result { + fn custom_study( + &self, + input: pb::scheduler::CustomStudyRequest, + ) -> Result { self.with_col(|col| col.custom_study(input)).map(Into::into) } fn custom_study_defaults( &self, - input: pb::CustomStudyDefaultsRequest, - ) -> Result { + input: pb::scheduler::CustomStudyDefaultsRequest, + ) -> Result { self.with_col(|col| col.custom_study_defaults(input.deck_id.into())) } } -impl From for pb::SchedTimingTodayResponse { - fn from(t: crate::scheduler::timing::SchedTimingToday) -> pb::SchedTimingTodayResponse { - pb::SchedTimingTodayResponse { +impl From for pb::scheduler::SchedTimingTodayResponse { + fn from( + t: crate::scheduler::timing::SchedTimingToday, + ) -> pb::scheduler::SchedTimingTodayResponse { + pb::scheduler::SchedTimingTodayResponse { days_elapsed: t.days_elapsed, next_day_at: t.next_day_at.0, } diff --git a/rslib/src/backend/scheduler/states/filtered.rs b/rslib/src/backend/scheduler/states/filtered.rs index 48acbe718..744379947 100644 --- 
a/rslib/src/backend/scheduler/states/filtered.rs +++ b/rslib/src/backend/scheduler/states/filtered.rs @@ -3,31 +3,30 @@ use crate::{pb, scheduler::states::FilteredState}; -impl From for pb::scheduling_state::Filtered { +impl From for pb::scheduler::scheduling_state::Filtered { fn from(state: FilteredState) -> Self { - pb::scheduling_state::Filtered { + pb::scheduler::scheduling_state::Filtered { value: Some(match state { FilteredState::Preview(state) => { - pb::scheduling_state::filtered::Value::Preview(state.into()) + pb::scheduler::scheduling_state::filtered::Value::Preview(state.into()) } FilteredState::Rescheduling(state) => { - pb::scheduling_state::filtered::Value::Rescheduling(state.into()) + pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state.into()) } }), } } } -impl From for FilteredState { - fn from(state: pb::scheduling_state::Filtered) -> Self { - match state - .value - .unwrap_or_else(|| pb::scheduling_state::filtered::Value::Preview(Default::default())) - { - pb::scheduling_state::filtered::Value::Preview(state) => { +impl From for FilteredState { + fn from(state: pb::scheduler::scheduling_state::Filtered) -> Self { + match state.value.unwrap_or_else(|| { + pb::scheduler::scheduling_state::filtered::Value::Preview(Default::default()) + }) { + pb::scheduler::scheduling_state::filtered::Value::Preview(state) => { FilteredState::Preview(state.into()) } - pb::scheduling_state::filtered::Value::Rescheduling(state) => { + pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => { FilteredState::Rescheduling(state.into()) } } diff --git a/rslib/src/backend/scheduler/states/learning.rs b/rslib/src/backend/scheduler/states/learning.rs index 30b87d8e6..34bd78a30 100644 --- a/rslib/src/backend/scheduler/states/learning.rs +++ b/rslib/src/backend/scheduler/states/learning.rs @@ -3,8 +3,8 @@ use crate::{pb, scheduler::states::LearnState}; -impl From for LearnState { - fn from(state: pb::scheduling_state::Learning) -> Self { +impl From for LearnState { + fn from(state: pb::scheduler::scheduling_state::Learning) -> Self { LearnState { remaining_steps: state.remaining_steps, scheduled_secs: state.scheduled_secs, @@ -12,9 +12,9 @@ impl From for LearnState { } } -impl From for pb::scheduling_state::Learning { +impl From for pb::scheduler::scheduling_state::Learning { fn from(state: LearnState) -> Self { - pb::scheduling_state::Learning { + pb::scheduler::scheduling_state::Learning { remaining_steps: state.remaining_steps, scheduled_secs: state.scheduled_secs, } diff --git a/rslib/src/backend/scheduler/states/mod.rs b/rslib/src/backend/scheduler/states/mod.rs index e627bd17e..7e1f55198 100644 --- a/rslib/src/backend/scheduler/states/mod.rs +++ b/rslib/src/backend/scheduler/states/mod.rs @@ -15,9 +15,9 @@ use crate::{ scheduler::states::{CardState, NewState, NormalState, SchedulingStates}, }; -impl From for pb::SchedulingStates { +impl From for pb::scheduler::SchedulingStates { fn from(choices: SchedulingStates) -> Self { - pb::SchedulingStates { + pb::scheduler::SchedulingStates { current: Some(choices.current.into()), again: Some(choices.again.into()), hard: Some(choices.hard.into()), @@ -27,8 +27,8 @@ impl From for pb::SchedulingStates { } } -impl From for SchedulingStates { - fn from(choices: pb::SchedulingStates) -> Self { +impl From for SchedulingStates { + fn from(choices: pb::scheduler::SchedulingStates) -> Self { SchedulingStates { current: choices.current.unwrap_or_default().into(), again: choices.again.unwrap_or_default().into(), @@ -39,24 +39,30 @@ 
impl From for SchedulingStates { } } -impl From for pb::SchedulingState { +impl From for pb::scheduler::SchedulingState { fn from(state: CardState) -> Self { - pb::SchedulingState { + pb::scheduler::SchedulingState { value: Some(match state { - CardState::Normal(state) => pb::scheduling_state::Value::Normal(state.into()), - CardState::Filtered(state) => pb::scheduling_state::Value::Filtered(state.into()), + CardState::Normal(state) => { + pb::scheduler::scheduling_state::Value::Normal(state.into()) + } + CardState::Filtered(state) => { + pb::scheduler::scheduling_state::Value::Filtered(state.into()) + } }), custom_data: None, } } } -impl From for CardState { - fn from(state: pb::SchedulingState) -> Self { +impl From for CardState { + fn from(state: pb::scheduler::SchedulingState) -> Self { if let Some(value) = state.value { match value { - pb::scheduling_state::Value::Normal(normal) => CardState::Normal(normal.into()), - pb::scheduling_state::Value::Filtered(filtered) => { + pb::scheduler::scheduling_state::Value::Normal(normal) => { + CardState::Normal(normal.into()) + } + pb::scheduler::scheduling_state::Value::Filtered(filtered) => { CardState::Filtered(filtered.into()) } } diff --git a/rslib/src/backend/scheduler/states/new.rs b/rslib/src/backend/scheduler/states/new.rs index ca508a49a..48eadbacb 100644 --- a/rslib/src/backend/scheduler/states/new.rs +++ b/rslib/src/backend/scheduler/states/new.rs @@ -3,17 +3,17 @@ use crate::{pb, scheduler::states::NewState}; -impl From for NewState { - fn from(state: pb::scheduling_state::New) -> Self { +impl From for NewState { + fn from(state: pb::scheduler::scheduling_state::New) -> Self { NewState { position: state.position, } } } -impl From for pb::scheduling_state::New { +impl From for pb::scheduler::scheduling_state::New { fn from(state: NewState) -> Self { - pb::scheduling_state::New { + pb::scheduler::scheduling_state::New { position: state.position, } } diff --git a/rslib/src/backend/scheduler/states/normal.rs b/rslib/src/backend/scheduler/states/normal.rs index d3d96db2e..c9e366808 100644 --- a/rslib/src/backend/scheduler/states/normal.rs +++ b/rslib/src/backend/scheduler/states/normal.rs @@ -3,37 +3,42 @@ use crate::{pb, scheduler::states::NormalState}; -impl From for pb::scheduling_state::Normal { +impl From for pb::scheduler::scheduling_state::Normal { fn from(state: NormalState) -> Self { - pb::scheduling_state::Normal { + pb::scheduler::scheduling_state::Normal { value: Some(match state { - NormalState::New(state) => pb::scheduling_state::normal::Value::New(state.into()), + NormalState::New(state) => { + pb::scheduler::scheduling_state::normal::Value::New(state.into()) + } NormalState::Learning(state) => { - pb::scheduling_state::normal::Value::Learning(state.into()) + pb::scheduler::scheduling_state::normal::Value::Learning(state.into()) } NormalState::Review(state) => { - pb::scheduling_state::normal::Value::Review(state.into()) + pb::scheduler::scheduling_state::normal::Value::Review(state.into()) } NormalState::Relearning(state) => { - pb::scheduling_state::normal::Value::Relearning(state.into()) + pb::scheduler::scheduling_state::normal::Value::Relearning(state.into()) } }), } } } -impl From for NormalState { - fn from(state: pb::scheduling_state::Normal) -> Self { - match state - .value - .unwrap_or_else(|| pb::scheduling_state::normal::Value::New(Default::default())) - { - pb::scheduling_state::normal::Value::New(state) => NormalState::New(state.into()), - pb::scheduling_state::normal::Value::Learning(state) => { +impl From for 
NormalState { + fn from(state: pb::scheduler::scheduling_state::Normal) -> Self { + match state.value.unwrap_or_else(|| { + pb::scheduler::scheduling_state::normal::Value::New(Default::default()) + }) { + pb::scheduler::scheduling_state::normal::Value::New(state) => { + NormalState::New(state.into()) + } + pb::scheduler::scheduling_state::normal::Value::Learning(state) => { NormalState::Learning(state.into()) } - pb::scheduling_state::normal::Value::Review(state) => NormalState::Review(state.into()), - pb::scheduling_state::normal::Value::Relearning(state) => { + pb::scheduler::scheduling_state::normal::Value::Review(state) => { + NormalState::Review(state.into()) + } + pb::scheduler::scheduling_state::normal::Value::Relearning(state) => { NormalState::Relearning(state.into()) } } diff --git a/rslib/src/backend/scheduler/states/preview.rs b/rslib/src/backend/scheduler/states/preview.rs index d696b908f..96c228e95 100644 --- a/rslib/src/backend/scheduler/states/preview.rs +++ b/rslib/src/backend/scheduler/states/preview.rs @@ -3,8 +3,8 @@ use crate::{pb, scheduler::states::PreviewState}; -impl From for PreviewState { - fn from(state: pb::scheduling_state::Preview) -> Self { +impl From for PreviewState { + fn from(state: pb::scheduler::scheduling_state::Preview) -> Self { PreviewState { scheduled_secs: state.scheduled_secs, finished: state.finished, @@ -12,9 +12,9 @@ impl From for PreviewState { } } -impl From for pb::scheduling_state::Preview { +impl From for pb::scheduler::scheduling_state::Preview { fn from(state: PreviewState) -> Self { - pb::scheduling_state::Preview { + pb::scheduler::scheduling_state::Preview { scheduled_secs: state.scheduled_secs, finished: state.finished, } diff --git a/rslib/src/backend/scheduler/states/relearning.rs b/rslib/src/backend/scheduler/states/relearning.rs index 6a77c7f6f..f99e4a969 100644 --- a/rslib/src/backend/scheduler/states/relearning.rs +++ b/rslib/src/backend/scheduler/states/relearning.rs @@ -3,8 +3,8 @@ use crate::{pb, scheduler::states::RelearnState}; -impl From for RelearnState { - fn from(state: pb::scheduling_state::Relearning) -> Self { +impl From for RelearnState { + fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self { RelearnState { review: state.review.unwrap_or_default().into(), learning: state.learning.unwrap_or_default().into(), @@ -12,9 +12,9 @@ impl From for RelearnState { } } -impl From for pb::scheduling_state::Relearning { +impl From for pb::scheduler::scheduling_state::Relearning { fn from(state: RelearnState) -> Self { - pb::scheduling_state::Relearning { + pb::scheduler::scheduling_state::Relearning { review: Some(state.review.into()), learning: Some(state.learning.into()), } diff --git a/rslib/src/backend/scheduler/states/rescheduling.rs b/rslib/src/backend/scheduler/states/rescheduling.rs index c23d3b512..5fe37a4fa 100644 --- a/rslib/src/backend/scheduler/states/rescheduling.rs +++ b/rslib/src/backend/scheduler/states/rescheduling.rs @@ -3,17 +3,17 @@ use crate::{pb, scheduler::states::ReschedulingFilterState}; -impl From for ReschedulingFilterState { - fn from(state: pb::scheduling_state::ReschedulingFilter) -> Self { +impl From for ReschedulingFilterState { + fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self { ReschedulingFilterState { original_state: state.original_state.unwrap_or_default().into(), } } } -impl From for pb::scheduling_state::ReschedulingFilter { +impl From for pb::scheduler::scheduling_state::ReschedulingFilter { fn from(state: ReschedulingFilterState) -> Self { - 
pb::scheduling_state::ReschedulingFilter { + pb::scheduler::scheduling_state::ReschedulingFilter { original_state: Some(state.original_state.into()), } } diff --git a/rslib/src/backend/scheduler/states/review.rs b/rslib/src/backend/scheduler/states/review.rs index dce2f1246..ab4eb5386 100644 --- a/rslib/src/backend/scheduler/states/review.rs +++ b/rslib/src/backend/scheduler/states/review.rs @@ -3,8 +3,8 @@ use crate::{pb, scheduler::states::ReviewState}; -impl From for ReviewState { - fn from(state: pb::scheduling_state::Review) -> Self { +impl From for ReviewState { + fn from(state: pb::scheduler::scheduling_state::Review) -> Self { ReviewState { scheduled_days: state.scheduled_days, elapsed_days: state.elapsed_days, @@ -15,9 +15,9 @@ impl From for ReviewState { } } -impl From for pb::scheduling_state::Review { +impl From for pb::scheduler::scheduling_state::Review { fn from(state: ReviewState) -> Self { - pb::scheduling_state::Review { + pb::scheduler::scheduling_state::Review { scheduled_days: state.scheduled_days, elapsed_days: state.elapsed_days, ease_factor: state.ease_factor, diff --git a/rslib/src/backend/search/browser_table.rs b/rslib/src/backend/search/browser_table.rs index fa8f6462d..c743266fb 100644 --- a/rslib/src/backend/search/browser_table.rs +++ b/rslib/src/backend/search/browser_table.rs @@ -6,8 +6,8 @@ use std::str::FromStr; use crate::{browser_table, i18n::I18n, pb}; impl browser_table::Column { - pub fn to_pb_column(self, i18n: &I18n) -> pb::browser_columns::Column { - pb::browser_columns::Column { + pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column { + pb::search::browser_columns::Column { key: self.to_string(), cards_mode_label: self.cards_mode_label(i18n), notes_mode_label: self.notes_mode_label(i18n), @@ -20,8 +20,8 @@ impl browser_table::Column { } } -impl From for Vec { - fn from(input: pb::StringList) -> Self { +impl From for Vec { + fn from(input: pb::generic::StringList) -> Self { input .vals .iter() diff --git a/rslib/src/backend/search/mod.rs b/rslib/src/backend/search/mod.rs index 98075662c..5066edf58 100644 --- a/rslib/src/backend/search/mod.rs +++ b/rslib/src/backend/search/mod.rs @@ -7,56 +7,62 @@ mod search_node; use std::{str::FromStr, sync::Arc}; use super::{notes::to_note_ids, Backend}; -pub(super) use crate::pb::search_service::Service as SearchService; +pub(super) use crate::pb::search::search_service::Service as SearchService; use crate::{ browser_table::Column, pb, - pb::sort_order::Value as SortOrderProto, + pb::search::sort_order::Value as SortOrderProto, prelude::*, search::{replace_search_node, JoinSearches, Node, SortMode}, }; impl SearchService for Backend { - fn build_search_string(&self, input: pb::SearchNode) -> Result { + fn build_search_string(&self, input: pb::search::SearchNode) -> Result { let node: Node = input.try_into()?; Ok(SearchBuilder::from_root(node).write().into()) } - fn search_cards(&self, input: pb::SearchRequest) -> Result { + fn search_cards(&self, input: pb::search::SearchRequest) -> Result { self.with_col(|col| { let order = input.order.unwrap_or_default().value.into(); let cids = col.search_cards(&input.search, order)?; - Ok(pb::SearchResponse { + Ok(pb::search::SearchResponse { ids: cids.into_iter().map(|v| v.0).collect(), }) }) } - fn search_notes(&self, input: pb::SearchRequest) -> Result { + fn search_notes(&self, input: pb::search::SearchRequest) -> Result { self.with_col(|col| { let order = input.order.unwrap_or_default().value.into(); let nids = col.search_notes(&input.search, 
order)?; - Ok(pb::SearchResponse { + Ok(pb::search::SearchResponse { ids: nids.into_iter().map(|v| v.0).collect(), }) }) } - fn join_search_nodes(&self, input: pb::JoinSearchNodesRequest) -> Result { + fn join_search_nodes( + &self, + input: pb::search::JoinSearchNodesRequest, + ) -> Result { let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?; let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?; Ok( - match pb::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() { - pb::search_node::group::Joiner::And => existing_node.and_flat(additional_node), - pb::search_node::group::Joiner::Or => existing_node.or_flat(additional_node), + match pb::search::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() { + pb::search::search_node::group::Joiner::And => existing_node.and_flat(additional_node), + pb::search::search_node::group::Joiner::Or => existing_node.or_flat(additional_node), } .write() .into(), ) } - fn replace_search_node(&self, input: pb::ReplaceSearchNodeRequest) -> Result { + fn replace_search_node( + &self, + input: pb::search::ReplaceSearchNodeRequest, + ) -> Result { let existing = { let node = input.existing_node.unwrap_or_default().try_into()?; if let Node::Group(nodes) = node { @@ -69,7 +75,10 @@ impl SearchService for Backend { Ok(replace_search_node(existing, replacement).into()) } - fn find_and_replace(&self, input: pb::FindAndReplaceRequest) -> Result { + fn find_and_replace( + &self, + input: pb::search::FindAndReplaceRequest, + ) -> Result { let mut search = if input.regex { input.search } else { @@ -94,11 +103,17 @@ impl SearchService for Backend { }) } - fn all_browser_columns(&self, _input: pb::Empty) -> Result { + fn all_browser_columns( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.all_browser_columns())) } - fn set_active_browser_columns(&self, input: pb::StringList) -> Result { + fn set_active_browser_columns( + &self, + input: pb::generic::StringList, + ) -> Result { self.with_col(|col| { col.state.active_browser_columns = Some(Arc::new(input.into())); Ok(()) @@ -106,15 +121,15 @@ impl SearchService for Backend { .map(Into::into) } - fn browser_row_for_id(&self, input: pb::Int64) -> Result { + fn browser_row_for_id(&self, input: pb::generic::Int64) -> Result { self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into)) } } impl From> for SortMode { fn from(order: Option) -> Self { - use pb::sort_order::Value as V; - match order.unwrap_or(V::None(pb::Empty {})) { + use pb::search::sort_order::Value as V; + match order.unwrap_or(V::None(pb::generic::Empty {})) { V::None(_) => SortMode::NoOrder, V::Custom(s) => SortMode::Custom(s), V::Builtin(b) => SortMode::Builtin { diff --git a/rslib/src/backend/search/search_node.rs b/rslib/src/backend/search/search_node.rs index 0ae6cae11..bd8e3f119 100644 --- a/rslib/src/backend/search/search_node.rs +++ b/rslib/src/backend/search/search_node.rs @@ -12,11 +12,11 @@ use crate::{ text::{escape_anki_wildcards, escape_anki_wildcards_for_search_node}, }; -impl TryFrom for Node { +impl TryFrom for Node { type Error = AnkiError; - fn try_from(msg: pb::SearchNode) -> std::result::Result { - use pb::search_node::{group::Joiner, Filter, Flag}; + fn try_from(msg: pb::search::SearchNode) -> std::result::Result { + use pb::search::search_node::{group::Joiner, Filter, Flag}; Ok(if let Some(filter) = msg.filter { match filter { Filter::Tag(s) => SearchNode::from_tag_name(&s).into(), @@ -52,7 +52,7 @@ impl 
TryFrom for Node { }), Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)), Filter::CardState(state) => Node::Search(SearchNode::State( - pb::search_node::CardState::from_i32(state) + pb::search::search_node::CardState::from_i32(state) .unwrap_or_default() .into(), )), @@ -114,33 +114,33 @@ impl TryFrom for Node { } } -impl From for RatingKind { - fn from(r: pb::search_node::Rating) -> Self { +impl From for RatingKind { + fn from(r: pb::search::search_node::Rating) -> Self { match r { - pb::search_node::Rating::Again => RatingKind::AnswerButton(1), - pb::search_node::Rating::Hard => RatingKind::AnswerButton(2), - pb::search_node::Rating::Good => RatingKind::AnswerButton(3), - pb::search_node::Rating::Easy => RatingKind::AnswerButton(4), - pb::search_node::Rating::Any => RatingKind::AnyAnswerButton, - pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule, + pb::search::search_node::Rating::Again => RatingKind::AnswerButton(1), + pb::search::search_node::Rating::Hard => RatingKind::AnswerButton(2), + pb::search::search_node::Rating::Good => RatingKind::AnswerButton(3), + pb::search::search_node::Rating::Easy => RatingKind::AnswerButton(4), + pb::search::search_node::Rating::Any => RatingKind::AnyAnswerButton, + pb::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule, } } } -impl From for StateKind { - fn from(k: pb::search_node::CardState) -> Self { +impl From for StateKind { + fn from(k: pb::search::search_node::CardState) -> Self { match k { - pb::search_node::CardState::New => StateKind::New, - pb::search_node::CardState::Learn => StateKind::Learning, - pb::search_node::CardState::Review => StateKind::Review, - pb::search_node::CardState::Due => StateKind::Due, - pb::search_node::CardState::Suspended => StateKind::Suspended, - pb::search_node::CardState::Buried => StateKind::Buried, + pb::search::search_node::CardState::New => StateKind::New, + pb::search::search_node::CardState::Learn => StateKind::Learning, + pb::search::search_node::CardState::Review => StateKind::Review, + pb::search::search_node::CardState::Due => StateKind::Due, + pb::search::search_node::CardState::Suspended => StateKind::Suspended, + pb::search::search_node::CardState::Buried => StateKind::Buried, } } } -impl pb::search_node::IdList { +impl pb::search::search_node::IdList { fn into_id_string(self) -> String { self.ids .iter() diff --git a/rslib/src/backend/stats.rs b/rslib/src/backend/stats.rs index adaef28e9..ee3470eb0 100644 --- a/rslib/src/backend/stats.rs +++ b/rslib/src/backend/stats.rs @@ -2,23 +2,29 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::Backend; -pub(super) use crate::pb::stats_service::Service as StatsService; +pub(super) use crate::pb::stats::stats_service::Service as StatsService; use crate::{pb, prelude::*, revlog::RevlogReviewKind}; impl StatsService for Backend { - fn card_stats(&self, input: pb::CardId) -> Result { + fn card_stats(&self, input: pb::cards::CardId) -> Result { self.with_col(|col| col.card_stats(input.into())) } - fn graphs(&self, input: pb::GraphsRequest) -> Result { + fn graphs(&self, input: pb::stats::GraphsRequest) -> Result { self.with_col(|col| col.graph_data_for_search(&input.search, input.days)) } - fn get_graph_preferences(&self, _input: pb::Empty) -> Result { + fn get_graph_preferences( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| Ok(col.get_graph_preferences())) } - fn set_graph_preferences(&self, input: pb::GraphPreferences) -> Result { 
+ fn set_graph_preferences( + &self, + input: pb::stats::GraphPreferences, + ) -> Result { self.with_col(|col| col.set_graph_preferences(input)) .map(Into::into) } @@ -27,11 +33,11 @@ impl StatsService for Backend { impl From for i32 { fn from(kind: RevlogReviewKind) -> Self { (match kind { - RevlogReviewKind::Learning => pb::revlog_entry::ReviewKind::Learning, - RevlogReviewKind::Review => pb::revlog_entry::ReviewKind::Review, - RevlogReviewKind::Relearning => pb::revlog_entry::ReviewKind::Relearning, - RevlogReviewKind::Filtered => pb::revlog_entry::ReviewKind::Filtered, - RevlogReviewKind::Manual => pb::revlog_entry::ReviewKind::Manual, + RevlogReviewKind::Learning => pb::stats::revlog_entry::ReviewKind::Learning, + RevlogReviewKind::Review => pb::stats::revlog_entry::ReviewKind::Review, + RevlogReviewKind::Relearning => pb::stats::revlog_entry::ReviewKind::Relearning, + RevlogReviewKind::Filtered => pb::stats::revlog_entry::ReviewKind::Filtered, + RevlogReviewKind::Manual => pb::stats::revlog_entry::ReviewKind::Manual, }) as i32 } } diff --git a/rslib/src/backend/sync/mod.rs b/rslib/src/backend/sync/mod.rs index df98606eb..c30ef0345 100644 --- a/rslib/src/backend/sync/mod.rs +++ b/rslib/src/backend/sync/mod.rs @@ -9,7 +9,7 @@ use futures::future::{AbortHandle, AbortRegistration, Abortable}; use slog::warn; use super::{progress::AbortHandleSlot, Backend}; -pub(super) use crate::pb::sync_service::Service as SyncService; +pub(super) use crate::pb::sync::sync_service::Service as SyncService; use crate::{ media::MediaManager, pb, @@ -30,47 +30,47 @@ pub(super) struct SyncState { #[derive(Default, Debug)] pub(super) struct RemoteSyncStatus { pub last_check: TimestampSecs, - pub last_response: pb::sync_status_response::Required, + pub last_response: pb::sync::sync_status_response::Required, } impl RemoteSyncStatus { - pub(super) fn update(&mut self, required: pb::sync_status_response::Required) { + pub(super) fn update(&mut self, required: pb::sync::sync_status_response::Required) { self.last_check = TimestampSecs::now(); self.last_response = required } } -impl From for pb::SyncCollectionResponse { +impl From for pb::sync::SyncCollectionResponse { fn from(o: SyncOutput) -> Self { - pb::SyncCollectionResponse { + pb::sync::SyncCollectionResponse { host_number: o.host_number, server_message: o.server_message, required: match o.required { SyncActionRequired::NoChanges => { - pb::sync_collection_response::ChangesRequired::NoChanges as i32 + pb::sync::sync_collection_response::ChangesRequired::NoChanges as i32 } SyncActionRequired::FullSyncRequired { upload_ok, download_ok, } => { if !upload_ok { - pb::sync_collection_response::ChangesRequired::FullDownload as i32 + pb::sync::sync_collection_response::ChangesRequired::FullDownload as i32 } else if !download_ok { - pb::sync_collection_response::ChangesRequired::FullUpload as i32 + pb::sync::sync_collection_response::ChangesRequired::FullUpload as i32 } else { - pb::sync_collection_response::ChangesRequired::FullSync as i32 + pb::sync::sync_collection_response::ChangesRequired::FullSync as i32 } } SyncActionRequired::NormalSyncRequired => { - pb::sync_collection_response::ChangesRequired::NormalSync as i32 + pb::sync::sync_collection_response::ChangesRequired::NormalSync as i32 } }, } } } -impl From for SyncAuth { - fn from(a: pb::SyncAuth) -> Self { +impl From for SyncAuth { + fn from(a: pb::sync::SyncAuth) -> Self { SyncAuth { hkey: a.hkey, host_number: a.host_number, @@ -79,11 +79,11 @@ impl From for SyncAuth { } impl SyncService for Backend { - 
fn sync_media(&self, input: pb::SyncAuth) -> Result { + fn sync_media(&self, input: pb::sync::SyncAuth) -> Result { self.sync_media_inner(input).map(Into::into) } - fn abort_sync(&self, _input: pb::Empty) -> Result { + fn abort_sync(&self, _input: pb::generic::Empty) -> Result { if let Some(handle) = self.sync_abort.lock().unwrap().take() { handle.abort(); } @@ -91,7 +91,7 @@ impl SyncService for Backend { } /// Abort the media sync. Does not wait for completion. - fn abort_media_sync(&self, _input: pb::Empty) -> Result { + fn abort_media_sync(&self, _input: pb::generic::Empty) -> Result { let guard = self.state.lock().unwrap(); if let Some(handle) = &guard.sync.media_sync_abort { handle.abort(); @@ -99,33 +99,39 @@ impl SyncService for Backend { Ok(().into()) } - fn before_upload(&self, _input: pb::Empty) -> Result { + fn before_upload(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.before_upload().map(Into::into)) } - fn sync_login(&self, input: pb::SyncLoginRequest) -> Result { + fn sync_login(&self, input: pb::sync::SyncLoginRequest) -> Result { self.sync_login_inner(input) } - fn sync_status(&self, input: pb::SyncAuth) -> Result { + fn sync_status(&self, input: pb::sync::SyncAuth) -> Result { self.sync_status_inner(input) } - fn sync_collection(&self, input: pb::SyncAuth) -> Result { + fn sync_collection( + &self, + input: pb::sync::SyncAuth, + ) -> Result { self.sync_collection_inner(input) } - fn full_upload(&self, input: pb::SyncAuth) -> Result { + fn full_upload(&self, input: pb::sync::SyncAuth) -> Result { self.full_sync_inner(input, true)?; Ok(().into()) } - fn full_download(&self, input: pb::SyncAuth) -> Result { + fn full_download(&self, input: pb::sync::SyncAuth) -> Result { self.full_sync_inner(input, false)?; Ok(().into()) } - fn sync_server_method(&self, input: pb::SyncServerMethodRequest) -> Result { + fn sync_server_method( + &self, + input: pb::sync::SyncServerMethodRequest, + ) -> Result { let req = SyncRequest::from_method_and_data(input.method(), input.data)?; self.sync_server_method_inner(req).map(Into::into) } @@ -160,7 +166,7 @@ impl Backend { Ok((guard, abort_reg)) } - pub(super) fn sync_media_inner(&self, input: pb::SyncAuth) -> Result<()> { + pub(super) fn sync_media_inner(&self, input: pb::sync::SyncAuth) -> Result<()> { // mark media sync as active let (abort_handle, abort_reg) = AbortHandle::new_pair(); { @@ -220,7 +226,10 @@ impl Backend { } } - pub(super) fn sync_login_inner(&self, input: pb::SyncLoginRequest) -> Result { + pub(super) fn sync_login_inner( + &self, + input: pb::sync::SyncLoginRequest, + ) -> Result { let (_guard, abort_reg) = self.sync_abort_handle()?; let rt = self.runtime_handle(); @@ -230,16 +239,19 @@ impl Backend { Ok(sync_result) => sync_result, Err(_) => Err(AnkiError::Interrupted), }; - ret.map(|a| pb::SyncAuth { + ret.map(|a| pb::sync::SyncAuth { hkey: a.hkey, host_number: a.host_number, }) } - pub(super) fn sync_status_inner(&self, input: pb::SyncAuth) -> Result { + pub(super) fn sync_status_inner( + &self, + input: pb::sync::SyncAuth, + ) -> Result { // any local changes mean we can skip the network round-trip let req = self.with_col(|col| col.get_local_sync_status())?; - if req != pb::sync_status_response::Required::NoChanges { + if req != pb::sync::sync_status_response::Required::NoChanges { return Ok(req.into()); } @@ -273,8 +285,8 @@ impl Backend { pub(super) fn sync_collection_inner( &self, - input: pb::SyncAuth, - ) -> Result { + input: pb::sync::SyncAuth, + ) -> Result { let (_guard, abort_reg) = 
self.sync_abort_handle()?; let rt = self.runtime_handle(); @@ -314,7 +326,7 @@ impl Backend { Ok(output.into()) } - pub(super) fn full_sync_inner(&self, input: pb::SyncAuth, upload: bool) -> Result<()> { + pub(super) fn full_sync_inner(&self, input: pb::sync::SyncAuth, upload: bool) -> Result<()> { self.abort_media_sync_and_wait(); let rt = self.runtime_handle(); @@ -356,7 +368,7 @@ impl Backend { .unwrap() .sync .remote_sync_status - .update(pb::sync_status_response::Required::NoChanges); + .update(pb::sync::sync_status_response::Required::NoChanges); } sync_result } diff --git a/rslib/src/backend/tags.rs b/rslib/src/backend/tags.rs index 9f19b1737..df18b7085 100644 --- a/rslib/src/backend/tags.rs +++ b/rslib/src/backend/tags.rs @@ -2,16 +2,19 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::{notes::to_note_ids, Backend}; -pub(super) use crate::pb::tags_service::Service as TagsService; +pub(super) use crate::pb::tags::tags_service::Service as TagsService; use crate::{pb, prelude::*}; impl TagsService for Backend { - fn clear_unused_tags(&self, _input: pb::Empty) -> Result { + fn clear_unused_tags( + &self, + _input: pb::generic::Empty, + ) -> Result { self.with_col(|col| col.clear_unused_tags().map(Into::into)) } - fn all_tags(&self, _input: pb::Empty) -> Result { - Ok(pb::StringList { + fn all_tags(&self, _input: pb::generic::Empty) -> Result { + Ok(pb::generic::StringList { vals: self.with_col(|col| { Ok(col .storage @@ -23,22 +26,28 @@ impl TagsService for Backend { }) } - fn remove_tags(&self, tags: pb::String) -> Result { + fn remove_tags(&self, tags: pb::generic::String) -> Result { self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into)) } - fn set_tag_collapsed(&self, input: pb::SetTagCollapsedRequest) -> Result { + fn set_tag_collapsed( + &self, + input: pb::tags::SetTagCollapsedRequest, + ) -> Result { self.with_col(|col| { col.set_tag_collapsed(&input.name, input.collapsed) .map(Into::into) }) } - fn tag_tree(&self, _input: pb::Empty) -> Result { + fn tag_tree(&self, _input: pb::generic::Empty) -> Result { self.with_col(|col| col.tag_tree()) } - fn reparent_tags(&self, input: pb::ReparentTagsRequest) -> Result { + fn reparent_tags( + &self, + input: pb::tags::ReparentTagsRequest, + ) -> Result { let source_tags = input.tags; let target_tag = if input.new_parent.is_empty() { None @@ -49,19 +58,28 @@ impl TagsService for Backend { .map(Into::into) } - fn rename_tags(&self, input: pb::RenameTagsRequest) -> Result { + fn rename_tags( + &self, + input: pb::tags::RenameTagsRequest, + ) -> Result { self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix)) .map(Into::into) } - fn add_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result { + fn add_note_tags( + &self, + input: pb::tags::NoteIdsAndTagsRequest, + ) -> Result { self.with_col(|col| { col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags) .map(Into::into) }) } - fn remove_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result { + fn remove_note_tags( + &self, + input: pb::tags::NoteIdsAndTagsRequest, + ) -> Result { self.with_col(|col| { col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags) .map(Into::into) @@ -70,8 +88,8 @@ impl TagsService for Backend { fn find_and_replace_tag( &self, - input: pb::FindAndReplaceTagRequest, - ) -> Result { + input: pb::tags::FindAndReplaceTagRequest, + ) -> Result { self.with_col(|col| { let note_ids = if input.note_ids.is_empty() { col.search_notes_unordered("")? 
@@ -89,10 +107,13 @@ impl TagsService for Backend { }) } - fn complete_tag(&self, input: pb::CompleteTagRequest) -> Result { + fn complete_tag( + &self, + input: pb::tags::CompleteTagRequest, + ) -> Result { self.with_col(|col| { let tags = col.complete_tag(&input.input, input.match_limit as usize)?; - Ok(pb::CompleteTagResponse { tags }) + Ok(pb::tags::CompleteTagResponse { tags }) }) } } diff --git a/rslib/src/browser_table.rs b/rslib/src/browser_table.rs index a1b85e0fd..d8dbc5087 100644 --- a/rslib/src/browser_table.rs +++ b/rslib/src/browser_table.rs @@ -182,8 +182,8 @@ impl Column { .into() } - pub fn default_order(self) -> pb::browser_columns::Sorting { - use pb::browser_columns::Sorting; + pub fn default_order(self) -> pb::search::browser_columns::Sorting { + use pb::search::browser_columns::Sorting; match self { Column::Question | Column::Answer | Column::Custom => Sorting::None, Column::SortField | Column::Tags | Column::Notetype | Column::Deck => { @@ -205,8 +205,8 @@ impl Column { matches!(self, Self::Question | Self::Answer | Self::SortField) } - pub fn alignment(self) -> pb::browser_columns::Alignment { - use pb::browser_columns::Alignment; + pub fn alignment(self) -> pb::search::browser_columns::Alignment { + use pb::search::browser_columns::Alignment; match self { Self::Question | Self::Answer @@ -221,16 +221,16 @@ impl Column { } impl Collection { - pub fn all_browser_columns(&self) -> pb::BrowserColumns { - let mut columns: Vec = Column::iter() + pub fn all_browser_columns(&self) -> pb::search::BrowserColumns { + let mut columns: Vec = Column::iter() .filter(|&c| c != Column::Custom) .map(|c| c.to_pb_column(&self.tr)) .collect(); columns.sort_by(|c1, c2| c1.cards_mode_label.cmp(&c2.cards_mode_label)); - pb::BrowserColumns { columns } + pb::search::BrowserColumns { columns } } - pub fn browser_row_for_id(&mut self, id: i64) -> Result { + pub fn browser_row_for_id(&mut self, id: i64) -> Result { let notes_mode = self.get_config_bool(BoolKey::BrowserTableShowNotesMode); let columns = Arc::clone( self.state @@ -361,8 +361,8 @@ impl RowContext { }) } - fn browser_row(&self, columns: &[Column]) -> Result { - Ok(pb::BrowserRow { + fn browser_row(&self, columns: &[Column]) -> Result { + Ok(pb::search::BrowserRow { cells: columns .iter() .map(|&column| self.get_cell(column)) @@ -373,8 +373,8 @@ impl RowContext { }) } - fn get_cell(&self, column: Column) -> Result { - Ok(pb::browser_row::Cell { + fn get_cell(&self, column: Column) -> Result { + Ok(pb::search::browser_row::Cell { text: self.get_cell_text(column)?, is_rtl: self.get_is_rtl(column), }) @@ -546,8 +546,8 @@ impl RowContext { Ok(self.template()?.config.browser_font_size) } - fn get_row_color(&self) -> pb::browser_row::Color { - use pb::browser_row::Color; + fn get_row_color(&self) -> pb::search::browser_row::Color { + use pb::search::browser_row::Color; if self.notes_mode { if self.note.is_marked() { Color::Marked diff --git a/rslib/src/card_rendering/mod.rs b/rslib/src/card_rendering/mod.rs index 4ce02b9bc..9b528301f 100644 --- a/rslib/src/card_rendering/mod.rs +++ b/rslib/src/card_rendering/mod.rs @@ -18,7 +18,7 @@ pub fn extract_av_tags + AsRef>( txt: S, question_side: bool, tr: &I18n, -) -> (String, Vec) { +) -> (String, Vec) { nodes_or_text_only(txt.as_ref()) .map(|nodes| nodes.write_and_extract_av_tags(question_side, tr)) .unwrap_or_else(|| (txt.into(), vec![])) @@ -122,17 +122,21 @@ mod test { ( "foo [anki:play:q:0] baz [anki:play:q:1]", vec![ - pb::AvTag { - value: 
Some(pb::av_tag::Value::SoundOrVideo("bar.mp3".to_string())) + pb::card_rendering::AvTag { + value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo( + "bar.mp3".to_string() + )) }, - pb::AvTag { - value: Some(pb::av_tag::Value::Tts(pb::TtsTag { - field_text: tr.card_templates_blank().to_string(), - lang: "en_US".to_string(), - voices: vec![], - speed: 1.0, - other_args: vec![], - })) + pb::card_rendering::AvTag { + value: Some(pb::card_rendering::av_tag::Value::Tts( + pb::card_rendering::TtsTag { + field_text: tr.card_templates_blank().to_string(), + lang: "en_US".to_string(), + voices: vec![], + speed: 1.0, + other_args: vec![], + } + )) } ], ), diff --git a/rslib/src/card_rendering/writer.rs b/rslib/src/card_rendering/writer.rs index 77679e987..f2d1d58bb 100644 --- a/rslib/src/card_rendering/writer.rs +++ b/rslib/src/card_rendering/writer.rs @@ -19,7 +19,7 @@ impl<'a> CardNodes<'a> { &self, question_side: bool, tr: &I18n, - ) -> (String, Vec) { + ) -> (String, Vec) { let mut extractor = AvExtractor::new(question_side, tr); (extractor.write(self), extractor.tags) } @@ -119,7 +119,7 @@ impl Write for AvStripper { struct AvExtractor<'a> { side: char, - tags: Vec, + tags: Vec, tr: &'a I18n, } @@ -147,8 +147,8 @@ impl<'a> AvExtractor<'a> { impl Write for AvExtractor<'_> { fn write_sound(&mut self, buf: &mut String, resource: &str) { self.write_play_tag(buf); - self.tags.push(pb::AvTag { - value: Some(pb::av_tag::Value::SoundOrVideo( + self.tags.push(pb::card_rendering::AvTag { + value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo( decode_entities(resource).into(), )), }); @@ -161,18 +161,20 @@ impl Write for AvExtractor<'_> { } self.write_play_tag(buf); - self.tags.push(pb::AvTag { - value: Some(pb::av_tag::Value::Tts(pb::TtsTag { - field_text: self.transform_tts_content(directive), - lang: directive.lang.into(), - voices: directive.voices.iter().map(ToString::to_string).collect(), - speed: directive.speed, - other_args: directive - .options - .iter() - .map(|(key, val)| format!("{}={}", key, val)) - .collect(), - })), + self.tags.push(pb::card_rendering::AvTag { + value: Some(pb::card_rendering::av_tag::Value::Tts( + pb::card_rendering::TtsTag { + field_text: self.transform_tts_content(directive), + lang: directive.lang.into(), + voices: directive.voices.iter().map(ToString::to_string).collect(), + speed: directive.speed, + other_args: directive + .options + .iter() + .map(|(key, val)| format!("{}={}", key, val)) + .collect(), + }, + )), }); } } diff --git a/rslib/src/collection/backup.rs b/rslib/src/collection/backup.rs index bc9a43928..b5e3a836f 100644 --- a/rslib/src/collection/backup.rs +++ b/rslib/src/collection/backup.rs @@ -5,7 +5,9 @@ use std::{ ffi::OsStr, fs::{read_dir, remove_file, DirEntry}, path::{Path, PathBuf}, - thread::{self, JoinHandle}, + thread::{ + JoinHandle, {self}, + }, time::SystemTime, }; @@ -15,7 +17,7 @@ use log::error; use crate::{ import_export::package::export_colpkg_from_data, io::read_file, log, - pb::preferences::BackupLimits, prelude::*, + pb::config::preferences::BackupLimits, prelude::*, }; const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg"; diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs index 8b1c9d994..9674562ad 100644 --- a/rslib/src/config/mod.rs +++ b/rslib/src/config/mod.rs @@ -18,7 +18,7 @@ pub use self::{ bool::BoolKey, deck::DeckConfigKey, notetype::get_aux_notetype_config_key, number::I32ConfigKey, string::StringKey, }; -use crate::{pb::preferences::BackupLimits, prelude::*}; +use 
crate::{pb::config::preferences::BackupLimits, prelude::*}; /// Only used when updating/undoing. #[derive(Debug)] diff --git a/rslib/src/deckconfig/mod.rs b/rslib/src/deckconfig/mod.rs index c724d02de..b299b0294 100644 --- a/rslib/src/deckconfig/mod.rs +++ b/rslib/src/deckconfig/mod.rs @@ -8,7 +8,7 @@ mod update; pub use schema11::{DeckConfSchema11, NewCardOrderSchema11}; pub use update::UpdateDeckConfigsRequest; -pub use crate::pb::deck_config::{ +pub use crate::pb::deckconfig::deck_config::{ config::{ LeechAction, NewCardGatherPriority, NewCardInsertOrder, NewCardSortOrder, ReviewCardOrder, ReviewMix, diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs index b9c152a81..29891a41f 100644 --- a/rslib/src/deckconfig/update.rs +++ b/rslib/src/deckconfig/update.rs @@ -13,8 +13,8 @@ use crate::{ decks::NormalDeck, pb, pb::{ - deck::normal::DayLimit, - deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck}, + deckconfig::deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck}, + decks::deck::normal::DayLimit, }, prelude::*, search::{JoinSearches, SearchNode}, @@ -36,8 +36,8 @@ impl Collection { pub fn get_deck_configs_for_update( &mut self, deck: DeckId, - ) -> Result { - Ok(pb::DeckConfigsForUpdate { + ) -> Result { + Ok(pb::deckconfig::DeckConfigsForUpdate { all_config: self.get_deck_config_with_extra_for_update()?, current_deck: Some(self.get_current_deck_for_update(deck)?), defaults: Some(DeckConfig::default().into()), diff --git a/rslib/src/decks/counts.rs b/rslib/src/decks/counts.rs index f4e0f5453..dca78af31 100644 --- a/rslib/src/decks/counts.rs +++ b/rslib/src/decks/counts.rs @@ -42,11 +42,11 @@ impl Collection { pub(crate) fn counts_for_deck_today( &mut self, did: DeckId, - ) -> Result { + ) -> Result { let today = self.current_due_day(0)?; let mut deck = self.storage.get_deck(did)?.or_not_found(did)?; deck.reset_stats_if_day_changed(today); - Ok(pb::CountsForDeckTodayResponse { + Ok(pb::scheduler::CountsForDeckTodayResponse { new: deck.common.new_studied, review: deck.common.review_studied, }) diff --git a/rslib/src/decks/mod.rs b/rslib/src/decks/mod.rs index 6d3e9ba67..73468b134 100644 --- a/rslib/src/decks/mod.rs +++ b/rslib/src/decks/mod.rs @@ -21,7 +21,7 @@ pub(crate) use name::immediate_parent_name; pub use name::NativeDeckName; pub use schema11::DeckSchema11; -pub use crate::pb::{ +pub use crate::pb::decks::{ deck::{ filtered::{search_term::Order as FilteredSearchOrder, SearchTerm as FilteredSearchTerm}, kind_container::Kind as DeckKind, diff --git a/rslib/src/decks/schema11.rs b/rslib/src/decks/schema11.rs index d30c4ad64..5a7206913 100644 --- a/rslib/src/decks/schema11.rs +++ b/rslib/src/decks/schema11.rs @@ -23,7 +23,9 @@ pub enum DeckSchema11 { // serde doesn't support integer/bool enum tags, so we manually pick the correct variant mod dynfix { - use serde::de::{self, Deserialize, Deserializer}; + use serde::de::{ + Deserialize, Deserializer, {self}, + }; use serde_json::{Map, Value}; use super::{DeckSchema11, FilteredDeckSchema11, NormalDeckSchema11}; diff --git a/rslib/src/decks/stats.rs b/rslib/src/decks/stats.rs index 3375b93d9..421512c61 100644 --- a/rslib/src/decks/stats.rs +++ b/rslib/src/decks/stats.rs @@ -23,7 +23,7 @@ impl Collection { &mut self, today: u32, usn: Usn, - input: pb::UpdateStatsRequest, + input: pb::scheduler::UpdateStatsRequest, ) -> Result<()> { let did = input.deck_id.into(); let mutator = |c: &mut DeckCommon| { diff --git a/rslib/src/decks/tree.rs b/rslib/src/decks/tree.rs index 
ccf8d6357..de7ff6b0e 100644 --- a/rslib/src/decks/tree.rs +++ b/rslib/src/decks/tree.rs @@ -14,8 +14,10 @@ use super::{ limits::{remaining_limits_map, RemainingLimits}, DueCounts, }; -pub use crate::pb::set_deck_collapsed_request::Scope as DeckCollapseScope; -use crate::{config::SchedulerVersion, ops::OpOutput, pb::DeckTreeNode, prelude::*, undo::Op}; +pub use crate::pb::decks::set_deck_collapsed_request::Scope as DeckCollapseScope; +use crate::{ + config::SchedulerVersion, ops::OpOutput, pb::decks::DeckTreeNode, prelude::*, undo::Op, +}; fn deck_names_to_tree(names: impl Iterator) -> DeckTreeNode { let mut top = DeckTreeNode::default(); diff --git a/rslib/src/error/invalid_input.rs b/rslib/src/error/invalid_input.rs index 75f473a56..97080c94f 100644 --- a/rslib/src/error/invalid_input.rs +++ b/rslib/src/error/invalid_input.rs @@ -38,8 +38,8 @@ impl PartialEq for InvalidInputError { impl Eq for InvalidInputError {} -/// Allows generating [AnkiError::InvalidInput] from [Option::None] and the -/// typical [core::result::Result::Err]. +/// Allows generating [AnkiError::InvalidInput] from [None] and the +/// typical [Err]. pub trait OrInvalid { type Value; fn or_invalid(self, message: impl Into) -> Result; diff --git a/rslib/src/error/not_found.rs b/rslib/src/error/not_found.rs index 6fd4353ef..05e3ad328 100644 --- a/rslib/src/error/not_found.rs +++ b/rslib/src/error/not_found.rs @@ -35,7 +35,7 @@ impl PartialEq for NotFoundError { impl Eq for NotFoundError {} -/// Allows generating [AnkiError::NotFound] from [Option::None]. +/// Allows generating [AnkiError::NotFound] from [None]. pub trait OrNotFound { type Value; fn or_not_found(self, identifier: impl fmt::Display) -> Result; @@ -67,9 +67,6 @@ mod test { #[test] fn test_unqualified_lowercase_type_name() { - assert_eq!( - unqualified_lowercase_type_name::(), - "card id" - ); + assert_eq!(unqualified_lowercase_type_name::(), "card id"); } } diff --git a/rslib/src/import_export/mod.rs b/rslib/src/import_export/mod.rs index 5047936e5..d9f3c00bf 100644 --- a/rslib/src/import_export/mod.rs +++ b/rslib/src/import_export/mod.rs @@ -8,7 +8,7 @@ pub mod text; use std::marker::PhantomData; -pub use crate::pb::import_response::{Log as NoteLog, Note as LogNote}; +pub use crate::pb::import_export::import_response::{Log as NoteLog, Note as LogNote}; use crate::{ prelude::*, text::{ diff --git a/rslib/src/import_export/package/apkg/import/notes.rs b/rslib/src/import_export/package/apkg/import/notes.rs index 00ecaa8c5..29e0c17d9 100644 --- a/rslib/src/import_export/package/apkg/import/notes.rs +++ b/rslib/src/import_export/package/apkg/import/notes.rs @@ -153,7 +153,7 @@ impl<'n> NoteContext<'n> { } fn add_notetype_with_remapped_id(&mut self, notetype: &mut Notetype) -> Result<()> { - let old_id = std::mem::take(&mut notetype.id); + let old_id = mem::take(&mut notetype.id); notetype.usn = self.usn; self.target_col .add_notetype_inner(notetype, self.usn, true)?; diff --git a/rslib/src/import_export/package/apkg/tests.rs b/rslib/src/import_export/package/apkg/tests.rs index f37866f1c..539ce28aa 100644 --- a/rslib/src/import_export/package/apkg/tests.rs +++ b/rslib/src/import_export/package/apkg/tests.rs @@ -141,7 +141,7 @@ impl Collection { for file in [SAMPLE_JPG, SAMPLE_JS, &new_mp3_name] { assert!(self.media_folder.join(file).exists()); - assert!(*csums.get(file).unwrap() != [0; 20]); + assert_ne!(*csums.get(file).unwrap(), [0; 20]); } let imported_note = self.storage.get_note(note.id).unwrap().unwrap(); diff --git 
a/rslib/src/import_export/package/colpkg/export.rs b/rslib/src/import_export/package/colpkg/export.rs index 55fdbe273..9f7ee8516 100644 --- a/rslib/src/import_export/package/colpkg/export.rs +++ b/rslib/src/import_export/package/colpkg/export.rs @@ -6,7 +6,9 @@ use std::{ collections::HashMap, ffi::OsStr, fs::File, - io::{self, Read, Write}, + io::{ + Read, Write, {self}, + }, path::{Path, PathBuf}, }; @@ -268,7 +270,7 @@ fn write_media_map( buf }; let size = encoded_bytes.len(); - let mut cursor = std::io::Cursor::new(encoded_bytes); + let mut cursor = io::Cursor::new(encoded_bytes); if meta.zstd_compressed() { zstd_copy(&mut cursor, zip, size)?; } else { diff --git a/rslib/src/import_export/package/colpkg/import.rs b/rslib/src/import_export/package/colpkg/import.rs index e907374d3..0479930a2 100644 --- a/rslib/src/import_export/package/colpkg/import.rs +++ b/rslib/src/import_export/package/colpkg/import.rs @@ -3,12 +3,17 @@ use std::{ fs::File, - io::{self, Write}, + io::{ + Write, {self}, + }, path::{Path, PathBuf}, }; use zip::{read::ZipFile, ZipArchive}; -use zstd::{self, stream::copy_decode}; +use zstd::{ + stream::copy_decode, + {self}, +}; use crate::{ collection::CollectionBuilder, diff --git a/rslib/src/import_export/package/media.rs b/rslib/src/import_export/package/media.rs index 742a1b5bf..e41a78731 100644 --- a/rslib/src/import_export/package/media.rs +++ b/rslib/src/import_export/package/media.rs @@ -4,7 +4,9 @@ use std::{ borrow::Cow, collections::HashMap, - fs::{self, File}, + fs::{ + File, {self}, + }, io, path::{Path, PathBuf}, }; diff --git a/rslib/src/import_export/package/meta.rs b/rslib/src/import_export/package/meta.rs index 10ac98752..a001d3d1c 100644 --- a/rslib/src/import_export/package/meta.rs +++ b/rslib/src/import_export/package/meta.rs @@ -3,14 +3,16 @@ use std::{ fs::File, - io::{self, Read}, + io::{ + Read, {self}, + }, }; use prost::Message; use zip::ZipArchive; use zstd::stream::copy_decode; -pub(super) use crate::pb::{package_metadata::Version, PackageMetadata as Meta}; +pub(super) use crate::pb::import_export::{package_metadata::Version, PackageMetadata as Meta}; use crate::{error::ImportError, prelude::*, storage::SchemaVersion}; impl Version { @@ -98,7 +100,7 @@ impl Meta { pub(super) fn copy( &self, - reader: &mut impl io::Read, + reader: &mut impl Read, writer: &mut impl io::Write, ) -> io::Result<()> { if self.zstd_compressed() { diff --git a/rslib/src/import_export/package/mod.rs b/rslib/src/import_export/package/mod.rs index 244dca986..c52a8a063 100644 --- a/rslib/src/import_export/package/mod.rs +++ b/rslib/src/import_export/package/mod.rs @@ -11,4 +11,4 @@ pub(crate) use colpkg::export::export_colpkg_from_data; pub use colpkg::import::import_colpkg; pub(self) use meta::{Meta, Version}; -pub(self) use crate::pb::{media_entries::MediaEntry, MediaEntries}; +pub(self) use crate::pb::import_export::{media_entries::MediaEntry, MediaEntries}; diff --git a/rslib/src/import_export/text/csv/export.rs b/rslib/src/import_export/text/csv/export.rs index 8a7d09e6d..a31ab8c6c 100644 --- a/rslib/src/import_export/text/csv/export.rs +++ b/rslib/src/import_export/text/csv/export.rs @@ -11,7 +11,7 @@ use super::metadata::Delimiter; use crate::{ import_export::{ExportProgress, IncrementableProgress}, notetype::RenderCardOutput, - pb::ExportNoteCsvRequest, + pb::import_export::ExportNoteCsvRequest, prelude::*, search::{SearchNode, SortMode}, template::RenderedNode, diff --git a/rslib/src/import_export/text/csv/metadata.rs 
b/rslib/src/import_export/text/csv/metadata.rs index 6e7b2e39e..44d9f97a1 100644 --- a/rslib/src/import_export/text/csv/metadata.rs +++ b/rslib/src/import_export/text/csv/metadata.rs @@ -22,7 +22,7 @@ use crate::{ import_export::text::NameOrId, io::open_file, notetype::NoteField, - pb::StringList, + pb::generic::StringList, prelude::*, text::{html_to_text_line, is_html}, }; diff --git a/rslib/src/import_export/text/mod.rs b/rslib/src/import_export/text/mod.rs index ba3822d9f..d9b6adf7d 100644 --- a/rslib/src/import_export/text/mod.rs +++ b/rslib/src/import_export/text/mod.rs @@ -8,7 +8,7 @@ mod json; use serde_derive::{Deserialize, Serialize}; use super::LogNote; -use crate::pb::csv_metadata::DupeResolution; +use crate::pb::import_export::csv_metadata::DupeResolution; #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(default)] diff --git a/rslib/src/io.rs b/rslib/src/io.rs index 4e66fcc49..2aee5b3b9 100644 --- a/rslib/src/io.rs +++ b/rslib/src/io.rs @@ -15,7 +15,7 @@ use crate::{ pub(crate) type Result = std::result::Result; -/// See [std::fs::File::open]. +/// See [File::open]. pub(crate) fn open_file(path: impl AsRef) -> Result { File::open(&path).context(FileIoSnafu { path: path.as_ref(), diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs index bd565b78c..3305d0566 100644 --- a/rslib/src/media/files.rs +++ b/rslib/src/media/files.rs @@ -286,7 +286,7 @@ pub(crate) fn sha1_of_file(path: &Path) -> Result { } /// Return the SHA1 of a stream. -pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> std::io::Result { +pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result { let mut hasher = Sha1::new(); let mut buf = [0; 64 * 1024]; loop { diff --git a/rslib/src/media/sync.rs b/rslib/src/media/sync.rs index 3ee76fd34..58770192e 100644 --- a/rslib/src/media/sync.rs +++ b/rslib/src/media/sync.rs @@ -712,7 +712,7 @@ fn zip_files<'a>( let buf = vec![]; let mut invalid_entries = vec![]; - let w = std::io::Cursor::new(buf); + let w = io::Cursor::new(buf); let mut zip = zip::ZipWriter::new(w); let options = diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs index 9b29ff8ea..3e26e8283 100644 --- a/rslib/src/notes/mod.rs +++ b/rslib/src/notes/mod.rs @@ -18,7 +18,7 @@ use crate::{ notetype::{CardGenContext, NoteField}, ops::StateChanges, pb, - pb::note_fields_check_response::State as NoteFieldsState, + pb::notes::note_fields_check_response::State as NoteFieldsState, prelude::*, template::field_is_empty, text::{ensure_string_in_nfc, normalize_to_nfc, strip_html_preserving_media_filenames}, @@ -169,7 +169,7 @@ impl Note { /// Prepare note for saving to the database. Does not mark it as modified. 
pub(crate) fn prepare_for_update(&mut self, nt: &Notetype, normalize_text: bool) -> Result<()> { - assert!(nt.id == self.notetype_id); + assert_eq!(nt.id, self.notetype_id); let notetype_field_count = nt.fields.len().max(1); require!( notetype_field_count == self.fields.len(), @@ -258,9 +258,9 @@ pub(crate) fn normalize_field(field: &mut String, normalize_text: bool) { } } -impl From for pb::Note { +impl From for pb::notes::Note { fn from(n: Note) -> Self { - pb::Note { + pb::notes::Note { id: n.id.0, guid: n.guid, notetype_id: n.notetype_id.0, @@ -272,8 +272,8 @@ impl From for pb::Note { } } -impl From for Note { - fn from(n: pb::Note) -> Self { +impl From for Note { + fn from(n: pb::notes::Note) -> Self { Note { id: NoteId(n.id), guid: n.guid, diff --git a/rslib/src/notetype/fields.rs b/rslib/src/notetype/fields.rs index b87e9635e..505447daa 100644 --- a/rslib/src/notetype/fields.rs +++ b/rslib/src/notetype/fields.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::{NoteFieldConfig, NoteFieldProto}; -use crate::{pb::UInt32, prelude::*}; +use crate::{pb::generic::UInt32, prelude::*}; #[derive(Debug, PartialEq, Clone)] pub struct NoteField { diff --git a/rslib/src/notetype/mod.rs b/rslib/src/notetype/mod.rs index d97157540..16999786a 100644 --- a/rslib/src/notetype/mod.rs +++ b/rslib/src/notetype/mod.rs @@ -30,7 +30,7 @@ pub use stock::all_stock_notetypes; pub use templates::CardTemplate; use unicase::UniCase; -pub use crate::pb::{ +pub use crate::pb::notetypes::{ notetype::{ config::{ card_requirement::Kind as CardRequirementKind, CardRequirement, Kind as NotetypeKind, diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs index c44c5b3d2..7b4578e0e 100644 --- a/rslib/src/notetype/stock.rs +++ b/rslib/src/notetype/stock.rs @@ -7,7 +7,7 @@ use crate::{ error::Result, i18n::I18n, notetype::Notetype, - pb::stock_notetype::Kind, + pb::notetypes::stock_notetype::Kind, storage::SqliteStorage, timestamp::TimestampSecs, }; diff --git a/rslib/src/notetype/templates.rs b/rslib/src/notetype/templates.rs index b2f11c897..39631a616 100644 --- a/rslib/src/notetype/templates.rs +++ b/rslib/src/notetype/templates.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use super::{CardTemplateConfig, CardTemplateProto}; -use crate::{pb::UInt32, prelude::*, template::ParsedTemplate}; +use crate::{pb::generic::UInt32, prelude::*, template::ParsedTemplate}; #[derive(Debug, PartialEq, Clone)] pub struct CardTemplate { diff --git a/rslib/src/pb.rs b/rslib/src/pb.rs index 4bed294d0..ae4e01dc8 100644 --- a/rslib/src/pb.rs +++ b/rslib/src/pb.rs @@ -2,34 +2,29 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html macro_rules! 
protobuf { - ($ident:ident) => { + ($ident:ident, $name:literal) => { pub mod $ident { - #![allow(clippy::derive_partial_eq_without_eq)] - include!(concat!( - env!("OUT_DIR"), - concat!("/anki.", stringify!($ident), ".rs") - )); + include!(concat!(env!("OUT_DIR"), "/anki.", $name, ".rs")); } - pub use $ident::*; }; } -protobuf!(backend); -protobuf!(card_rendering); -protobuf!(cards); -protobuf!(collection); -protobuf!(config); -protobuf!(deckconfig); -protobuf!(decks); -protobuf!(generic); -protobuf!(i18n); -protobuf!(import_export); -protobuf!(links); -protobuf!(media); -protobuf!(notes); -protobuf!(notetypes); -protobuf!(scheduler); -protobuf!(search); -protobuf!(stats); -protobuf!(sync); -protobuf!(tags); +protobuf!(backend, "backend"); +protobuf!(card_rendering, "card_rendering"); +protobuf!(cards, "cards"); +protobuf!(collection, "collection"); +protobuf!(config, "config"); +protobuf!(deckconfig, "deckconfig"); +protobuf!(decks, "decks"); +protobuf!(generic, "generic"); +protobuf!(i18n, "i18n"); +protobuf!(import_export, "import_export"); +protobuf!(links, "links"); +protobuf!(media, "media"); +protobuf!(notes, "notes"); +protobuf!(notetypes, "notetypes"); +protobuf!(scheduler, "scheduler"); +protobuf!(search, "search"); +protobuf!(stats, "stats"); +protobuf!(sync, "sync"); +protobuf!(tags, "tags"); diff --git a/rslib/src/preferences.rs b/rslib/src/preferences.rs index 9a0af60e6..6ed4f7f1f 100644 --- a/rslib/src/preferences.rs +++ b/rslib/src/preferences.rs @@ -5,7 +5,7 @@ use crate::{ collection::Collection, config::{BoolKey, StringKey}, error::Result, - pb::{ + pb::config::{ preferences::{scheduling::NewReviewMix as NewRevMixPB, Editing, Reviewing, Scheduling}, Preferences, }, diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index 01bb9f65f..f68dcd005 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -330,7 +330,7 @@ impl Collection { self.update_deck_stats( updater.timing.days_elapsed, usn, - pb::UpdateStatsRequest { + pb::scheduler::UpdateStatsRequest { deck_id: updater.deck.id.0, new_delta, review_delta, diff --git a/rslib/src/scheduler/bury_and_suspend.rs b/rslib/src/scheduler/bury_and_suspend.rs index 5a235ae7d..d54c361d7 100644 --- a/rslib/src/scheduler/bury_and_suspend.rs +++ b/rslib/src/scheduler/bury_and_suspend.rs @@ -5,7 +5,7 @@ use super::timing::SchedTimingToday; use crate::{ card::CardQueue, config::SchedulerVersion, - pb::{ + pb::scheduler::{ bury_or_suspend_cards_request::Mode as BuryOrSuspendMode, unbury_deck_request::Mode as UnburyDeckMode, }, diff --git a/rslib/src/scheduler/congrats.rs b/rslib/src/scheduler/congrats.rs index 6a95cd56f..e17d5e197 100644 --- a/rslib/src/scheduler/congrats.rs +++ b/rslib/src/scheduler/congrats.rs @@ -14,7 +14,7 @@ pub(crate) struct CongratsInfo { } impl Collection { - pub fn congrats_info(&mut self) -> Result { + pub fn congrats_info(&mut self) -> Result { let deck = self.get_current_deck()?; let today = self.timing_today()?.days_elapsed; let info = self.storage.congrats_info(&deck, today)?; @@ -27,7 +27,7 @@ impl Collection { ((info.next_learn_due as i64) - self.learn_ahead_secs() as i64 - TimestampSecs::now().0) .max(60) as u32 }; - Ok(pb::CongratsInfoResponse { + Ok(pb::scheduler::CongratsInfoResponse { learn_remaining: info.learn_count, review_remaining: info.review_remaining, new_remaining: info.new_remaining, @@ -51,7 +51,7 @@ mod test { let info = col.congrats_info().unwrap(); assert_eq!( info, - crate::pb::CongratsInfoResponse { + 
crate::pb::scheduler::CongratsInfoResponse { learn_remaining: 0, review_remaining: false, new_remaining: false, diff --git a/rslib/src/scheduler/filtered/custom_study.rs b/rslib/src/scheduler/filtered/custom_study.rs index 8753a39f6..ec3738503 100644 --- a/rslib/src/scheduler/filtered/custom_study.rs +++ b/rslib/src/scheduler/filtered/custom_study.rs @@ -9,22 +9,25 @@ use crate::{ decks::{FilteredDeck, FilteredSearchOrder, FilteredSearchTerm}, error::{CustomStudyError, FilteredDeckError}, pb::{ - self as pb, - custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue}, + scheduler::custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue}, + {self as pb}, }, prelude::*, search::{JoinSearches, Negated, PropertyKind, RatingKind, SearchNode, StateKind}, }; impl Collection { - pub fn custom_study(&mut self, input: pb::CustomStudyRequest) -> Result> { + pub fn custom_study( + &mut self, + input: pb::scheduler::CustomStudyRequest, + ) -> Result> { self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input)) } pub fn custom_study_defaults( &mut self, deck_id: DeckId, - ) -> Result { + ) -> Result { // daily counts let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?; let normal = deck.normal()?; @@ -70,11 +73,11 @@ impl Collection { ); let mut all_tags: Vec<_> = self.all_tags_in_deck(deck_id)?.into_iter().collect(); all_tags.sort_unstable(); - let tags: Vec = all_tags + let tags: Vec = all_tags .into_iter() .map(|tag| { let tag = tag.into_inner(); - pb::custom_study_defaults_response::Tag { + pb::scheduler::custom_study_defaults_response::Tag { include: include_tags.contains(&tag), exclude: exclude_tags.contains(&tag), name: tag, @@ -82,7 +85,7 @@ impl Collection { }) .collect(); - Ok(pb::CustomStudyDefaultsResponse { + Ok(pb::scheduler::CustomStudyDefaultsResponse { tags, extend_new, extend_review, @@ -95,7 +98,7 @@ impl Collection { } impl Collection { - fn custom_study_inner(&mut self, input: pb::CustomStudyRequest) -> Result<()> { + fn custom_study_inner(&mut self, input: pb::scheduler::CustomStudyRequest) -> Result<()> { let mut deck = self .storage .get_deck(input.deck_id.into())? 
@@ -292,8 +295,8 @@ mod test { use super::*; use crate::{ collection::open_test_collection, - pb::{ - scheduler::custom_study_request::{cram::CramKind, Cram, Value}, + pb::scheduler::{ + custom_study_request::{cram::CramKind, Cram, Value}, CustomStudyRequest, }, }; diff --git a/rslib/src/scheduler/queue/builder/mod.rs b/rslib/src/scheduler/queue/builder/mod.rs index 5090bd8f4..e63af1028 100644 --- a/rslib/src/scheduler/queue/builder/mod.rs +++ b/rslib/src/scheduler/queue/builder/mod.rs @@ -266,7 +266,7 @@ mod test { use crate::{ card::{CardQueue, CardType}, collection::open_test_collection, - pb::deck_config::config::{NewCardGatherPriority, NewCardSortOrder}, + pb::deckconfig::deck_config::config::{NewCardGatherPriority, NewCardSortOrder}, }; impl Collection { diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index a08a5287d..861a56aa1 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -737,8 +737,8 @@ mod test { use Node::*; use SearchNode::*; - assert_eq!(parse("")?, vec![Search(SearchNode::WholeCollection)]); - assert_eq!(parse(" ")?, vec![Search(SearchNode::WholeCollection)]); + assert_eq!(parse("")?, vec![Search(WholeCollection)]); + assert_eq!(parse(" ")?, vec![Search(WholeCollection)]); // leading/trailing/interspersed whitespace assert_eq!( diff --git a/rslib/src/serde.rs b/rslib/src/serde.rs index 48fa78f98..853f20591 100644 --- a/rslib/src/serde.rs +++ b/rslib/src/serde.rs @@ -14,7 +14,7 @@ use crate::timestamp::TimestampSecs; pub(crate) fn default_on_invalid<'de, T, D>(deserializer: D) -> Result where T: Default + DeTrait<'de>, - D: serde::de::Deserializer<'de>, + D: Deserializer<'de>, { let v: Value = DeTrait::deserialize(deserializer)?; Ok(T::deserialize(v).unwrap_or_default()) diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs index 76b08ef40..f9ba3bdb0 100644 --- a/rslib/src/stats/card.rs +++ b/rslib/src/stats/card.rs @@ -9,7 +9,7 @@ use crate::{ }; impl Collection { - pub fn card_stats(&mut self, cid: CardId) -> Result { + pub fn card_stats(&mut self, cid: CardId) -> Result { let card = self.storage.get_card(cid)?.or_not_found(cid)?; let note = self .storage @@ -27,7 +27,7 @@ impl Collection { let (average_secs, total_secs) = average_and_total_secs_strings(&revlog); let (due_date, due_position) = self.due_date_and_position(&card)?; - Ok(pb::CardStatsResponse { + Ok(pb::stats::CardStatsResponse { card_id: card.id.into(), note_id: card.note_id.into(), deck: deck.human_name(), @@ -92,8 +92,8 @@ fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) { } } -fn stats_revlog_entry(entry: &RevlogEntry) -> pb::card_stats_response::StatsRevlogEntry { - pb::card_stats_response::StatsRevlogEntry { +fn stats_revlog_entry(entry: &RevlogEntry) -> pb::stats::card_stats_response::StatsRevlogEntry { + pb::stats::card_stats_response::StatsRevlogEntry { time: entry.id.as_secs().0, review_kind: entry.review_kind.into(), button_chosen: entry.button_chosen as u32, diff --git a/rslib/src/stats/graphs.rs b/rslib/src/stats/graphs.rs index 8eeb311eb..722c2205d 100644 --- a/rslib/src/stats/graphs.rs +++ b/rslib/src/stats/graphs.rs @@ -14,13 +14,13 @@ impl Collection { &mut self, search: &str, days: u32, - ) -> Result { + ) -> Result { let guard = self.search_cards_into_table(search, SortMode::NoOrder)?; let all = search.trim().is_empty(); guard.col.graph_data(all, days) } - fn graph_data(&mut self, all: bool, days: u32) -> Result { + fn graph_data(&mut self, all: bool, days: u32) -> Result { let timing = self.timing_today()?; let 
revlog_start = if days > 0 { timing @@ -41,7 +41,7 @@ impl Collection { .get_pb_revlog_entries_for_searched_cards(revlog_start)? }; - Ok(pb::GraphsResponse { + Ok(pb::stats::GraphsResponse { cards: cards.into_iter().map(Into::into).collect(), revlog, days_elapsed: timing.days_elapsed, @@ -51,8 +51,8 @@ impl Collection { }) } - pub(crate) fn get_graph_preferences(&self) -> pb::GraphPreferences { - pb::GraphPreferences { + pub(crate) fn get_graph_preferences(&self) -> pb::stats::GraphPreferences { + pb::stats::GraphPreferences { calendar_first_day_of_week: self.get_first_day_of_week() as i32, card_counts_separate_inactive: self .get_config_bool(BoolKey::CardCountsSeparateInactive), @@ -61,7 +61,10 @@ impl Collection { } } - pub(crate) fn set_graph_preferences(&mut self, prefs: pb::GraphPreferences) -> Result<()> { + pub(crate) fn set_graph_preferences( + &mut self, + prefs: pb::stats::GraphPreferences, + ) -> Result<()> { self.set_first_day_of_week(match prefs.calendar_first_day_of_week { 1 => Weekday::Monday, 5 => Weekday::Friday, @@ -77,9 +80,9 @@ impl Collection { } } -impl From for pb::RevlogEntry { +impl From for pb::stats::RevlogEntry { fn from(e: RevlogEntry) -> Self { - pb::RevlogEntry { + pb::stats::RevlogEntry { id: e.id.0, cid: e.cid.0, usn: e.usn.0, diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs index adc81731b..b569bb070 100644 --- a/rslib/src/storage/card/mod.rs +++ b/rslib/src/storage/card/mod.rs @@ -29,7 +29,7 @@ use crate::{ }; impl FromSql for CardType { - fn column_result(value: ValueRef<'_>) -> std::result::Result { + fn column_result(value: ValueRef<'_>) -> result::Result { if let ValueRef::Integer(i) = value { Ok(Self::try_from(i as u8).map_err(|_| FromSqlError::InvalidType)?) } else { @@ -39,7 +39,7 @@ impl FromSql for CardType { } impl FromSql for CardQueue { - fn column_result(value: ValueRef<'_>) -> std::result::Result { + fn column_result(value: ValueRef<'_>) -> result::Result { if let ValueRef::Integer(i) = value { Ok(Self::try_from(i as i8).map_err(|_| FromSqlError::InvalidType)?) } else { diff --git a/rslib/src/storage/deck/mod.rs b/rslib/src/storage/deck/mod.rs index ccc3d0ca4..bdbfbfc80 100644 --- a/rslib/src/storage/deck/mod.rs +++ b/rslib/src/storage/deck/mod.rs @@ -153,7 +153,7 @@ impl SqliteStorage { // caller should ensure name unique pub(crate) fn add_deck(&self, deck: &mut Deck) -> Result<()> { - assert!(deck.id.0 == 0); + assert_eq!(deck.id.0, 0); deck.id.0 = self .db .prepare(include_str!("alloc_id.sql"))? diff --git a/rslib/src/storage/note/mod.rs b/rslib/src/storage/note/mod.rs index d74db2d45..c6a1d6e3f 100644 --- a/rslib/src/storage/note/mod.rs +++ b/rslib/src/storage/note/mod.rs @@ -51,7 +51,7 @@ impl super::SqliteStorage { /// If fields have been modified, caller must call note.prepare_for_update() prior to calling this. 
pub(crate) fn update_note(&self, note: &Note) -> Result<()> { - assert!(note.id.0 != 0); + assert_ne!(note.id.0, 0); let mut stmt = self.db.prepare_cached(include_str!("update.sql"))?; stmt.execute(params![ note.guid, @@ -68,7 +68,7 @@ impl super::SqliteStorage { } pub(crate) fn add_note(&self, note: &mut Note) -> Result<()> { - assert!(note.id.0 == 0); + assert_eq!(note.id.0, 0); let mut stmt = self.db.prepare_cached(include_str!("add.sql"))?; stmt.execute(params![ TimestampMillis::now(), diff --git a/rslib/src/storage/notetype/mod.rs b/rslib/src/storage/notetype/mod.rs index fa2eca591..dab440e04 100644 --- a/rslib/src/storage/notetype/mod.rs +++ b/rslib/src/storage/notetype/mod.rs @@ -226,7 +226,7 @@ impl SqliteStorage { } pub(crate) fn add_notetype(&self, nt: &mut Notetype) -> Result<()> { - assert!(nt.id.0 == 0); + assert_eq!(nt.id.0, 0); let mut stmt = self.db.prepare_cached(include_str!("add_notetype.sql"))?; let mut config_bytes = vec![]; diff --git a/rslib/src/storage/revlog/mod.rs b/rslib/src/storage/revlog/mod.rs index cceee4eb4..8242d5e40 100644 --- a/rslib/src/storage/revlog/mod.rs +++ b/rslib/src/storage/revlog/mod.rs @@ -113,7 +113,7 @@ impl SqliteStorage { pub(crate) fn get_pb_revlog_entries_for_searched_cards( &self, after: TimestampSecs, - ) -> Result> { + ) -> Result> { self.db .prepare_cached(concat!( include_str!("get.sql"), @@ -137,7 +137,7 @@ impl SqliteStorage { pub(crate) fn get_all_revlog_entries( &self, after: TimestampSecs, - ) -> Result> { + ) -> Result> { self.db .prepare_cached(concat!(include_str!("get.sql"), " where id >= ?"))? .query_and_then([after.0 * 1000], |r| row_to_revlog_entry(r).map(Into::into))? diff --git a/rslib/src/storage/upgrades/mod.rs b/rslib/src/storage/upgrades/mod.rs index 8740a83ec..2f6a2b5d5 100644 --- a/rslib/src/storage/upgrades/mod.rs +++ b/rslib/src/storage/upgrades/mod.rs @@ -81,8 +81,8 @@ mod test { #[test] #[allow(clippy::assertions_on_constants)] fn assert_18_is_latest_schema_version() { - assert!( - 18 == SCHEMA_MAX_VERSION, + assert_eq!( + 18, SCHEMA_MAX_VERSION, "must implement SqliteStorage::downgrade_to(SchemaVersion::V18)" ); } diff --git a/rslib/src/sync/http.rs b/rslib/src/sync/http.rs index 5b9b8a9cc..88e3a9469 100644 --- a/rslib/src/sync/http.rs +++ b/rslib/src/sync/http.rs @@ -6,7 +6,7 @@ use std::path::PathBuf; use serde::{Deserialize, Serialize}; use super::{Chunk, Graves, SanityCheckCounts, UnchunkedChanges}; -use crate::{io::read_file, pb::sync_server_method_request::Method, prelude::*}; +use crate::{io::read_file, pb::sync::sync_server_method_request::Method, prelude::*}; #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub enum SyncRequest { diff --git a/rslib/src/sync/http_client.rs b/rslib/src/sync/http_client.rs index 099fbbc44..6bc7e4cbe 100644 --- a/rslib/src/sync/http_client.rs +++ b/rslib/src/sync/http_client.rs @@ -65,7 +65,7 @@ pub struct Timeouts { impl Timeouts { pub fn new() -> Self { - let io_secs = if std::env::var("LONG_IO_TIMEOUT").is_ok() { + let io_secs = if env::var("LONG_IO_TIMEOUT").is_ok() { 3600 } else { 300 @@ -314,7 +314,7 @@ impl HttpSyncClient { usize, impl Stream>, )> { - let resp: reqwest::Response = self.request_bytes("download", b"{}", true).await?; + let resp: Response = self.request_bytes("download", b"{}", true).await?; let len = resp.content_length().unwrap_or_default(); Ok((len as usize, resp.bytes_stream())) } @@ -379,7 +379,7 @@ where } fn sync_endpoint(host_number: u32) -> String { - if let Ok(endpoint) = std::env::var("SYNC_ENDPOINT") { + if let 
Ok(endpoint) = env::var("SYNC_ENDPOINT") { endpoint } else { let suffix = if host_number > 0 { @@ -484,13 +484,13 @@ mod test { #[test] fn http_client() -> Result<()> { - let user = match std::env::var("TEST_SYNC_USER") { + let user = match env::var("TEST_SYNC_USER") { Ok(s) => s, Err(_) => { return Ok(()); } }; - let pass = std::env::var("TEST_SYNC_PASS").unwrap(); + let pass = env::var("TEST_SYNC_PASS").unwrap(); env_logger::init(); let rt = Runtime::new().unwrap(); diff --git a/rslib/src/sync/mod.rs b/rslib/src/sync/mod.rs index 62360922e..7ae649692 100644 --- a/rslib/src/sync/mod.rs +++ b/rslib/src/sync/mod.rs @@ -23,7 +23,7 @@ use crate::{ io::atomic_rename, notes::Note, notetype::{Notetype, NotetypeSchema11}, - pb::{sync_status_response, SyncStatusResponse}, + pb::sync::{sync_status_response, SyncStatusResponse}, prelude::*, revlog::RevlogEntry, serde::{default_on_invalid, deserialize_int_from_number}, diff --git a/rslib/src/tags/tree.rs b/rslib/src/tags/tree.rs index 20bccbf9a..0dc4b5a0a 100644 --- a/rslib/src/tags/tree.rs +++ b/rslib/src/tags/tree.rs @@ -6,7 +6,7 @@ use std::{collections::HashSet, iter::Peekable}; use unicase::UniCase; use super::{immediate_parent_name_unicase, Tag}; -use crate::{pb::TagTreeNode, prelude::*}; +use crate::{pb::tags::TagTreeNode, prelude::*}; impl Collection { pub fn tag_tree(&mut self) -> Result { diff --git a/rslib/src/template.rs b/rslib/src/template.rs index 336368e50..e422cb043 100644 --- a/rslib/src/template.rs +++ b/rslib/src/template.rs @@ -78,7 +78,7 @@ fn tokens<'a>(template: &'a str) -> Box impl Iterator> { - std::iter::from_fn(move || { + iter::from_fn(move || { if data.is_empty() { return None; } @@ -158,7 +158,7 @@ fn alternate_handlebar_token(s: &str) -> nom::IResult<&str, Token> { } fn legacy_tokens(mut data: &str) -> impl Iterator> { - std::iter::from_fn(move || { + iter::from_fn(move || { if data.is_empty() { return None; } diff --git a/rslib/src/tests.rs b/rslib/src/tests.rs index 630801c42..8033dba5d 100644 --- a/rslib/src/tests.rs +++ b/rslib/src/tests.rs @@ -10,7 +10,7 @@ use crate::{ deckconfig::UpdateDeckConfigsRequest, io::create_dir, media::MediaManager, - pb::deck_configs_for_update::current_deck::Limits, + pb::deckconfig::deck_configs_for_update::current_deck::Limits, prelude::*, };
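
// A minimal, self-contained sketch of the `protobuf!` pattern rewritten in
// rslib/src/pb.rs above -- not Anki's actual build wiring. The real macro
// includes prost-generated code via
// `include!(concat!(env!("OUT_DIR"), "/anki.", $name, ".rs"))` and no longer
// emits `pub use $ident::*;`, which is why call sites throughout this diff
// switch from flat paths such as `pb::Note` or `pb::Empty` to qualified ones
// such as `pb::notes::Note` and `pb::generic::Empty`. The `PACKAGE` constant
// below is a placeholder so the snippet compiles without the prost build step.
macro_rules! protobuf {
    ($ident:ident, $name:literal) => {
        pub mod $ident {
            // Stand-in for the generated `anki.<package>.rs` contents.
            pub const PACKAGE: &str = $name;
        }
    };
}

protobuf!(generic, "generic");
protobuf!(notes, "notes");

fn main() {
    // Callers now name the submodule explicitly, mirroring the
    // `pb::generic::*` / `pb::notes::*` rewrites in the hunks above.
    println!("{} {}", generic::PACKAGE, notes::PACKAGE);
}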