Minor Rust cleanups (#2272)

* Run cargo +nightly fmt

* Latest prost-build includes clippy workaround

* Tweak Rust protobuf imports

- Avoid use of stringify!(), as JetBrains editors get confused by it
- Stop merging all protobuf symbols into a single namespace
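As an illustration of that second point (a toy sketch, not code from this commit): with the flattened re-exports gone, call sites keep the per-proto-file module in the path, e.g. pb::cards::Card rather than pb::Card.

    // Toy example: two generated modules kept distinct instead of being
    // glob-merged into a single namespace.
    mod pb {
        pub mod cards {
            pub struct Card {
                pub id: i64,
            }
        }
        pub mod generic {
            pub struct Empty {}
        }
    }

    // Before-style imports (`use pb::cards::*; use pb::generic::*;`) hid where
    // each symbol came from; naming the module keeps the origin visible.
    use crate::pb::{cards::Card, generic::Empty};

    fn main() {
        let card = Card { id: 1 };
        let _empty = Empty {};
        println!("card {}", card.id);
    }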

* Remove some unnecessary qualifications

Found via IntelliJ lint
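A minimal sketch of the kind of cleanup meant here (illustrative only): once a module is imported, repeating its full path is redundant.

    use std::fs;

    fn remove_temp(path: &str) -> std::io::Result<()> {
        // Before: std::fs::remove_file(path)?;
        fs::remove_file(path)?;
        Ok(())
    }

    fn main() {
        // Error ignored on purpose; the file may not exist in this demo.
        let _ = remove_temp("does-not-exist.tmp");
    }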

* Migrate some asserts to assert_eq/ne
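A small sketch of that migration (illustrative, not taken from the diff): assert_eq!/assert_ne! print both operands on failure, which a plain assert! on a comparison does not.

    fn main() {
        let expected = 3;
        let actual = 1 + 2;
        // Before: assert!(actual == expected);
        assert_eq!(actual, expected, "sum should match");
        // Before: assert!(actual != 0);
        assert_ne!(actual, 0);
    }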

* Remove mention of node_modules exclusion

This no longer seems to be necessary after migrating away from Bazel,
and excluding it means TS/Svelte files can't be edited properly.
Damien Elmes, 2022-12-16 11:40:27 +00:00 (committed by GitHub)
parent 22ecef6fb2
commit fa625d7ad8
101 changed files with 1062 additions and 737 deletions

Cargo.lock (generated)

@@ -2583,9 +2583,9 @@ dependencies = [
[[package]]
name = "prost-build"
-version = "0.11.3"
+version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e330bf1316db56b12c2bcfa399e8edddd4821965ea25ddb2c134b610b1c1c604"
+checksum = "276470f7f281b0ed53d2ae42dd52b4a8d08853a3c70e7fe95882acbb98a6ae94"
dependencies = [
 "bytes",
 "heck",

@@ -160,7 +160,7 @@ impl BuildAction for GenPythonProto {
        -Iproto $in"
    }

-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
+    fn files(&mut self, build: &mut impl FilesHandle) {
        let proto_inputs = build.expand_inputs(&self.proto_files);
        let python_outputs: Vec<_> = proto_inputs
            .iter()

@@ -94,7 +94,7 @@ fn copy_windows_extras(folder_root: &Utf8Path) {

fn clean_top_level_files(folder_root: &Utf8Path) {
    let mut to_remove = vec![];
-    for entry in std::fs::read_dir(folder_root).unwrap() {
+    for entry in fs::read_dir(folder_root).unwrap() {
        let entry = entry.unwrap();
        if entry.file_name() == "lib" {
            continue;
@@ -104,9 +104,9 @@ fn clean_top_level_files(folder_root: &Utf8Path) {
    }
    for path in to_remove {
        if path.is_dir() {
-            std::fs::remove_dir_all(path).unwrap()
+            fs::remove_dir_all(path).unwrap()
        } else {
-            std::fs::remove_file(path).unwrap()
+            fs::remove_file(path).unwrap()
        }
    }
}

@@ -49,12 +49,6 @@ see and install a number of recommended extensions.
If you decide to use PyCharm instead of VS Code, there are somethings to be
aware of.

-### Slowdowns
-
-Excluding the node_modules folder inside the editor may improve performance:
-https://intellij-support.jetbrains.com/hc/en-us/community/posts/115000721750-Excluding-directories-globally
-
### Pylib References

You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a

@@ -90,7 +90,7 @@ enum Commands {
    BuildDmgs(BuildDmgsArgs),
}

-fn main() -> anyhow::Result<()> {
+fn main() -> Result<()> {
    match Cli::parse().command {
        Commands::BuildApp {
            version,
@@ -215,7 +215,7 @@ fn fix_rpath(exe_path: Utf8PathBuf) -> Result<()> {

fn get_plist(anki_version: &str) -> plist::Dictionary {
    let reader = std::io::Cursor::new(include_bytes!("Info.plist"));
-    let mut plist = plist::Value::from_reader(reader)
+    let mut plist = Value::from_reader(reader)
        .unwrap()
        .into_dictionary()
        .unwrap();

@@ -17,7 +17,7 @@ struct Args {
    qt5_setup_path: Utf8PathBuf,
}

-fn main() -> anyhow::Result<()> {
+fn main() -> Result<()> {
    let args = Args::parse();

    let src_win_folder = Utf8Path::new("qt/bundle/win");

@@ -27,7 +27,7 @@ required-features = ["bench"]
# After updating anything below, run ../cargo/update.py

[build-dependencies]
-prost-build = "0.11.3"
+prost-build = "0.11.4"
which = "4.3.0"

[dev-dependencies]

@@ -3,10 +3,11 @@
//! Check the .ftl files at build time to ensure we don't get runtime load failures.

-use super::gather::TranslationsByLang;
use fluent::{FluentBundle, FluentResource};
use unic_langid::LanguageIdentifier;

+use super::gather::TranslationsByLang;
+
pub fn check(lang_map: &TranslationsByLang) {
    for (lang, files_map) in lang_map {
        for (fname, content) in files_map {

@@ -362,24 +362,21 @@ fn want_comma_as_decimal_separator(langs: &[LanguageIdentifier]) -> bool {
}

fn format_decimal_with_comma(
-    val: &fluent::FluentValue,
+    val: &FluentValue,
    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
    format_number_values(val, Some(","))
}

fn format_decimal_with_period(
-    val: &fluent::FluentValue,
+    val: &FluentValue,
    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
    format_number_values(val, None)
}

#[inline]
-fn format_number_values(
-    val: &fluent::FluentValue,
-    alt_separator: Option<&'static str>,
-) -> Option<String> {
+fn format_number_values(val: &FluentValue, alt_separator: Option<&'static str>) -> Option<String> {
    match val {
        FluentValue::Number(num) => {
            // create a string with desired maximum digits

@@ -3,9 +3,10 @@
// copied from https://github.com/projectfluent/fluent-rs/pull/241

-use fluent_syntax::{ast::*, parser::Slice};
use std::fmt::{self, Error, Write};

+use fluent_syntax::{ast::*, parser::Slice};
+
pub fn serialize<'s, S: Slice<'s>>(resource: &Resource<S>) -> String {
    serialize_with_options(resource, Options::default())
}

@@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::{adding::DeckAndNotetype, pb::DeckAndNotetype as DeckAndNotetypeProto};
+use crate::{adding::DeckAndNotetype, pb::notes::DeckAndNotetype as DeckAndNotetypeProto};

impl From<DeckAndNotetype> for DeckAndNotetypeProto {
    fn from(s: DeckAndNotetype) -> Self {

@@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::Backend;
-pub(super) use crate::pb::cards_service::Service as CardsService;
+pub(super) use crate::pb::cards::cards_service::Service as CardsService;
use crate::{
    card::{CardQueue, CardType},
    pb,
@@ -10,7 +10,7 @@ use crate::{
};

impl CardsService for Backend {
-    fn get_card(&self, input: pb::CardId) -> Result<pb::Card> {
+    fn get_card(&self, input: pb::cards::CardId) -> Result<pb::cards::Card> {
        let cid = input.into();
        self.with_col(|col| {
            col.storage
@@ -20,7 +20,10 @@ impl CardsService for Backend {
        })
    }

-    fn update_cards(&self, input: pb::UpdateCardsRequest) -> Result<pb::OpChanges> {
+    fn update_cards(
+        &self,
+        input: pb::cards::UpdateCardsRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| {
            let cards = input
                .cards
@@ -35,7 +38,7 @@ impl CardsService for Backend {
            .map(Into::into)
    }

-    fn remove_cards(&self, input: pb::RemoveCardsRequest) -> Result<pb::Empty> {
+    fn remove_cards(&self, input: pb::cards::RemoveCardsRequest) -> Result<pb::generic::Empty> {
        self.with_col(|col| {
            col.transact_no_undo(|col| {
                col.remove_cards_and_orphaned_notes(
@@ -50,13 +53,19 @@ impl CardsService for Backend {
        })
    }

-    fn set_deck(&self, input: pb::SetDeckRequest) -> Result<pb::OpChangesWithCount> {
+    fn set_deck(
+        &self,
+        input: pb::cards::SetDeckRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
        let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect();
        let deck_id = input.deck_id.into();
        self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
    }

-    fn set_flag(&self, input: pb::SetFlagRequest) -> Result<pb::OpChangesWithCount> {
+    fn set_flag(
+        &self,
+        input: pb::cards::SetFlagRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
        self.with_col(|col| {
            col.set_card_flag(&to_card_ids(input.card_ids), input.flag)
                .map(Into::into)
@@ -64,10 +73,10 @@ impl CardsService for Backend {
    }
}

-impl TryFrom<pb::Card> for Card {
+impl TryFrom<pb::cards::Card> for Card {
    type Error = AnkiError;

-    fn try_from(c: pb::Card) -> Result<Self, Self::Error> {
+    fn try_from(c: pb::cards::Card) -> Result<Self, Self::Error> {
        let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?;
        let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?;
        Ok(Card {
@@ -94,9 +103,9 @@ impl TryFrom<pb::Card> for Card {
    }
}

-impl From<Card> for pb::Card {
+impl From<Card> for pb::cards::Card {
    fn from(c: Card) -> Self {
-        pb::Card {
+        pb::cards::Card {
            id: c.id.0,
            note_id: c.note_id.0,
            deck_id: c.deck_id.0,

@@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::Backend;
-pub(super) use crate::pb::cardrendering_service::Service as CardRenderingService;
+pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService;
use crate::{
    card_rendering::{extract_av_tags, strip_av_tags},
    latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex},
@@ -21,16 +21,19 @@ use crate::{
impl CardRenderingService for Backend {
    fn extract_av_tags(
        &self,
-        input: pb::ExtractAvTagsRequest,
-    ) -> Result<pb::ExtractAvTagsResponse> {
+        input: pb::card_rendering::ExtractAvTagsRequest,
+    ) -> Result<pb::card_rendering::ExtractAvTagsResponse> {
        let out = extract_av_tags(input.text, input.question_side, self.i18n());
-        Ok(pb::ExtractAvTagsResponse {
+        Ok(pb::card_rendering::ExtractAvTagsResponse {
            text: out.0,
            av_tags: out.1,
        })
    }

-    fn extract_latex(&self, input: pb::ExtractLatexRequest) -> Result<pb::ExtractLatexResponse> {
+    fn extract_latex(
+        &self,
+        input: pb::card_rendering::ExtractLatexRequest,
+    ) -> Result<pb::card_rendering::ExtractLatexResponse> {
        let func = if input.expand_clozes {
            extract_latex_expanding_clozes
        } else {
@@ -38,11 +41,11 @@ impl CardRenderingService for Backend {
        };
        let (text, extracted) = func(&input.text, input.svg);
-        Ok(pb::ExtractLatexResponse {
+        Ok(pb::card_rendering::ExtractLatexResponse {
            text,
            latex: extracted
                .into_iter()
-                .map(|e: ExtractedLatex| pb::ExtractedLatex {
+                .map(|e: ExtractedLatex| pb::card_rendering::ExtractedLatex {
                    filename: e.fname,
                    latex_body: e.latex,
                })
@@ -50,7 +53,10 @@ impl CardRenderingService for Backend {
        })
    }

-    fn get_empty_cards(&self, _input: pb::Empty) -> Result<pb::EmptyCardsReport> {
+    fn get_empty_cards(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::card_rendering::EmptyCardsReport> {
        self.with_col(|col| {
            let mut empty = col.empty_cards()?;
            let report = col.empty_cards_report(&mut empty)?;
@@ -58,14 +64,14 @@ impl CardRenderingService for Backend {
            let mut outnotes = vec![];
            for (_ntid, notes) in empty {
                outnotes.extend(notes.into_iter().map(|e| {
-                    pb::empty_cards_report::NoteWithEmptyCards {
+                    pb::card_rendering::empty_cards_report::NoteWithEmptyCards {
                        note_id: e.nid.0,
                        will_delete_note: e.empty.len() == e.current_count,
                        card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(),
                    }
                }))
            }
-            Ok(pb::EmptyCardsReport {
+            Ok(pb::card_rendering::EmptyCardsReport {
                report,
                notes: outnotes,
            })
@@ -74,8 +80,8 @@ impl CardRenderingService for Backend {

    fn render_existing_card(
        &self,
-        input: pb::RenderExistingCardRequest,
-    ) -> Result<pb::RenderCardResponse> {
+        input: pb::card_rendering::RenderExistingCardRequest,
+    ) -> Result<pb::card_rendering::RenderCardResponse> {
        self.with_col(|col| {
            col.render_existing_card(CardId(input.card_id), input.browser)
                .map(Into::into)
@@ -84,8 +90,8 @@ impl CardRenderingService for Backend {

    fn render_uncommitted_card(
        &self,
-        input: pb::RenderUncommittedCardRequest,
-    ) -> Result<pb::RenderCardResponse> {
+        input: pb::card_rendering::RenderUncommittedCardRequest,
+    ) -> Result<pb::card_rendering::RenderCardResponse> {
        let template = input.template.or_invalid("missing template")?.into();
        let mut note = input.note.or_invalid("missing note")?.into();
        let ord = input.card_ord as u16;
@@ -98,8 +104,8 @@ impl CardRenderingService for Backend {

    fn render_uncommitted_card_legacy(
        &self,
-        input: pb::RenderUncommittedCardLegacyRequest,
-    ) -> Result<pb::RenderCardResponse> {
+        input: pb::card_rendering::RenderUncommittedCardLegacyRequest,
+    ) -> Result<pb::card_rendering::RenderCardResponse> {
        let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
        let template = schema11.into();
        let mut note = input.note.or_invalid("missing note")?.into();
@@ -111,11 +117,14 @@ impl CardRenderingService for Backend {
        })
    }

-    fn strip_av_tags(&self, input: pb::String) -> Result<pb::String> {
+    fn strip_av_tags(&self, input: pb::generic::String) -> Result<pb::generic::String> {
        Ok(strip_av_tags(input.val).into())
    }

-    fn render_markdown(&self, input: pb::RenderMarkdownRequest) -> Result<pb::String> {
+    fn render_markdown(
+        &self,
+        input: pb::card_rendering::RenderMarkdownRequest,
+    ) -> Result<pb::generic::String> {
        let mut text = render_markdown(&input.markdown);
        if input.sanitize {
            // currently no images
@@ -124,18 +133,21 @@ impl CardRenderingService for Backend {
        Ok(text.into())
    }

-    fn encode_iri_paths(&self, input: pb::String) -> Result<pb::String> {
+    fn encode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
        Ok(encode_iri_paths(&input.val).to_string().into())
    }

-    fn decode_iri_paths(&self, input: pb::String) -> Result<pb::String> {
+    fn decode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
        Ok(decode_iri_paths(&input.val).to_string().into())
    }

-    fn strip_html(&self, input: pb::StripHtmlRequest) -> Result<pb::String> {
+    fn strip_html(
+        &self,
+        input: pb::card_rendering::StripHtmlRequest,
+    ) -> Result<pb::generic::String> {
        Ok(match input.mode() {
-            pb::strip_html_request::Mode::Normal => strip_html(&input.text),
-            pb::strip_html_request::Mode::PreserveMediaFilenames => {
+            pb::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text),
+            pb::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => {
                strip_html_preserving_media_filenames(&input.text)
            }
        }
@@ -143,38 +155,47 @@ impl CardRenderingService for Backend {
        .into())
    }

-    fn compare_answer(&self, input: pb::CompareAnswerRequest) -> Result<pb::String> {
+    fn compare_answer(
+        &self,
+        input: pb::card_rendering::CompareAnswerRequest,
+    ) -> Result<pb::generic::String> {
        Ok(compare_answer(&input.expected, &input.provided).into())
    }
}

-fn rendered_nodes_to_proto(nodes: Vec<RenderedNode>) -> Vec<pb::RenderedTemplateNode> {
+fn rendered_nodes_to_proto(
+    nodes: Vec<RenderedNode>,
+) -> Vec<pb::card_rendering::RenderedTemplateNode> {
    nodes
        .into_iter()
-        .map(|n| pb::RenderedTemplateNode {
+        .map(|n| pb::card_rendering::RenderedTemplateNode {
            value: Some(rendered_node_to_proto(n)),
        })
        .collect()
}

-fn rendered_node_to_proto(node: RenderedNode) -> pb::rendered_template_node::Value {
+fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_template_node::Value {
    match node {
-        RenderedNode::Text { text } => pb::rendered_template_node::Value::Text(text),
+        RenderedNode::Text { text } => {
+            pb::card_rendering::rendered_template_node::Value::Text(text)
+        }
        RenderedNode::Replacement {
            field_name,
            current_text,
            filters,
-        } => pb::rendered_template_node::Value::Replacement(pb::RenderedTemplateReplacement {
+        } => pb::card_rendering::rendered_template_node::Value::Replacement(
+            pb::card_rendering::RenderedTemplateReplacement {
                field_name,
                current_text,
                filters,
-        }),
+            },
+        ),
    }
}

-impl From<RenderCardOutput> for pb::RenderCardResponse {
+impl From<RenderCardOutput> for pb::card_rendering::RenderCardResponse {
    fn from(o: RenderCardOutput) -> Self {
-        pb::RenderCardResponse {
+        pb::card_rendering::RenderCardResponse {
            question_nodes: rendered_nodes_to_proto(o.qnodes),
            answer_nodes: rendered_nodes_to_proto(o.anodes),
            css: o.css,

@@ -6,7 +6,7 @@ use std::sync::MutexGuard;
use slog::error;

use super::{progress::Progress, Backend};
-pub(super) use crate::pb::collection_service::Service as CollectionService;
+pub(super) use crate::pb::collection::collection_service::Service as CollectionService;
use crate::{
    backend::progress::progress_to_proto,
    collection::CollectionBuilder,
@@ -16,17 +16,20 @@ use crate::{
};

impl CollectionService for Backend {
-    fn latest_progress(&self, _input: pb::Empty) -> Result<pb::Progress> {
+    fn latest_progress(&self, _input: pb::generic::Empty) -> Result<pb::collection::Progress> {
        let progress = self.progress_state.lock().unwrap().last_progress;
        Ok(progress_to_proto(progress, &self.tr))
    }

-    fn set_wants_abort(&self, _input: pb::Empty) -> Result<pb::Empty> {
+    fn set_wants_abort(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
        self.progress_state.lock().unwrap().want_abort = true;
        Ok(().into())
    }

-    fn open_collection(&self, input: pb::OpenCollectionRequest) -> Result<pb::Empty> {
+    fn open_collection(
+        &self,
+        input: pb::collection::OpenCollectionRequest,
+    ) -> Result<pb::generic::Empty> {
        let mut guard = self.lock_closed_collection()?;

        let mut builder = CollectionBuilder::new(input.collection_path);
@@ -45,7 +48,10 @@ impl CollectionService for Backend {
        Ok(().into())
    }

-    fn close_collection(&self, input: pb::CloseCollectionRequest) -> Result<pb::Empty> {
+    fn close_collection(
+        &self,
+        input: pb::collection::CloseCollectionRequest,
+    ) -> Result<pb::generic::Empty> {
        let desired_version = if input.downgrade_to_schema11 {
            Some(SchemaVersion::V11)
        } else {
@@ -63,42 +69,48 @@ impl CollectionService for Backend {
        Ok(().into())
    }

-    fn check_database(&self, _input: pb::Empty) -> Result<pb::CheckDatabaseResponse> {
+    fn check_database(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::collection::CheckDatabaseResponse> {
        let mut handler = self.new_progress_handler();
        let progress_fn = move |progress, throttle| {
            handler.update(Progress::DatabaseCheck(progress), throttle);
        };
        self.with_col(|col| {
            col.check_database(progress_fn)
-                .map(|problems| pb::CheckDatabaseResponse {
+                .map(|problems| pb::collection::CheckDatabaseResponse {
                    problems: problems.to_i18n_strings(&col.tr),
                })
        })
    }

-    fn get_undo_status(&self, _input: pb::Empty) -> Result<pb::UndoStatus> {
+    fn get_undo_status(&self, _input: pb::generic::Empty) -> Result<pb::collection::UndoStatus> {
        self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr)))
    }

-    fn undo(&self, _input: pb::Empty) -> Result<pb::OpChangesAfterUndo> {
+    fn undo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
        self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr)))
    }

-    fn redo(&self, _input: pb::Empty) -> Result<pb::OpChangesAfterUndo> {
+    fn redo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
        self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr)))
    }

-    fn add_custom_undo_entry(&self, input: pb::String) -> Result<pb::UInt32> {
+    fn add_custom_undo_entry(&self, input: pb::generic::String) -> Result<pb::generic::UInt32> {
        self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into()))
    }

-    fn merge_undo_entries(&self, input: pb::UInt32) -> Result<pb::OpChanges> {
+    fn merge_undo_entries(&self, input: pb::generic::UInt32) -> Result<pb::collection::OpChanges> {
        let starting_from = input.val as usize;
        self.with_col(|col| col.merge_undoable_ops(starting_from))
            .map(Into::into)
    }

-    fn create_backup(&self, input: pb::CreateBackupRequest) -> Result<pb::Bool> {
+    fn create_backup(
+        &self,
+        input: pb::collection::CreateBackupRequest,
+    ) -> Result<pb::generic::Bool> {
        // lock collection
        let mut col_lock = self.lock_open_collection()?;
        let col = col_lock.as_mut().unwrap();
@@ -122,7 +134,7 @@ impl CollectionService for Backend {
        Ok(created.into())
    }

-    fn await_backup_completion(&self, _input: pb::Empty) -> Result<pb::Empty> {
+    fn await_backup_completion(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
        self.await_backup_completion()?;
        Ok(().into())
    }

@@ -4,11 +4,11 @@
use serde_json::Value;

use super::Backend;
-pub(super) use crate::pb::config_service::Service as ConfigService;
+pub(super) use crate::pb::config::config_service::Service as ConfigService;
use crate::{
    config::{BoolKey, StringKey},
    pb,
-    pb::config_key::{Bool as BoolKeyProto, String as StringKeyProto},
+    pb::config::config_key::{Bool as BoolKeyProto, String as StringKeyProto},
    prelude::*,
};
@@ -54,7 +54,7 @@ impl From<StringKeyProto> for StringKey {
}

impl ConfigService for Backend {
-    fn get_config_json(&self, input: pb::String) -> Result<pb::Json> {
+    fn get_config_json(&self, input: pb::generic::String) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let val: Option<Value> = col.get_config_optional(input.val.as_str());
            val.or_not_found(input.val)
@@ -63,7 +63,10 @@ impl ConfigService for Backend {
        })
    }

-    fn set_config_json(&self, input: pb::SetConfigJsonRequest) -> Result<pb::OpChanges> {
+    fn set_config_json(
+        &self,
+        input: pb::config::SetConfigJsonRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| {
            let val: Value = serde_json::from_slice(&input.value_json)?;
            col.set_config_json(input.key.as_str(), &val, input.undoable)
@@ -71,7 +74,10 @@ impl ConfigService for Backend {
            .map(Into::into)
    }

-    fn set_config_json_no_undo(&self, input: pb::SetConfigJsonRequest) -> Result<pb::Empty> {
+    fn set_config_json_no_undo(
+        &self,
+        input: pb::config::SetConfigJsonRequest,
+    ) -> Result<pb::generic::Empty> {
        self.with_col(|col| {
            let val: Value = serde_json::from_slice(&input.value_json)?;
            col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
@@ -79,12 +85,12 @@ impl ConfigService for Backend {
            .map(Into::into)
    }

-    fn remove_config(&self, input: pb::String) -> Result<pb::OpChanges> {
+    fn remove_config(&self, input: pb::generic::String) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.remove_config(input.val.as_str()))
            .map(Into::into)
    }

-    fn get_all_config(&self, _input: pb::Empty) -> Result<pb::Json> {
+    fn get_all_config(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let conf = col.storage.get_all_config()?;
            serde_json::to_vec(&conf).map_err(Into::into)
@@ -92,37 +98,49 @@ impl ConfigService for Backend {
            .map(Into::into)
    }

-    fn get_config_bool(&self, input: pb::GetConfigBoolRequest) -> Result<pb::Bool> {
+    fn get_config_bool(
+        &self,
+        input: pb::config::GetConfigBoolRequest,
+    ) -> Result<pb::generic::Bool> {
        self.with_col(|col| {
-            Ok(pb::Bool {
+            Ok(pb::generic::Bool {
                val: col.get_config_bool(input.key().into()),
            })
        })
    }

-    fn set_config_bool(&self, input: pb::SetConfigBoolRequest) -> Result<pb::OpChanges> {
+    fn set_config_bool(
+        &self,
+        input: pb::config::SetConfigBoolRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable))
            .map(Into::into)
    }

-    fn get_config_string(&self, input: pb::GetConfigStringRequest) -> Result<pb::String> {
+    fn get_config_string(
+        &self,
+        input: pb::config::GetConfigStringRequest,
+    ) -> Result<pb::generic::String> {
        self.with_col(|col| {
-            Ok(pb::String {
+            Ok(pb::generic::String {
                val: col.get_config_string(input.key().into()),
            })
        })
    }

-    fn set_config_string(&self, input: pb::SetConfigStringRequest) -> Result<pb::OpChanges> {
+    fn set_config_string(
+        &self,
+        input: pb::config::SetConfigStringRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable))
            .map(Into::into)
    }

-    fn get_preferences(&self, _input: pb::Empty) -> Result<pb::Preferences> {
+    fn get_preferences(&self, _input: pb::generic::Empty) -> Result<pb::config::Preferences> {
        self.with_col(|col| col.get_preferences())
    }

-    fn set_preferences(&self, input: pb::Preferences) -> Result<pb::OpChanges> {
+    fn set_preferences(&self, input: pb::config::Preferences) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.set_preferences(input))
            .map(Into::into)
    }

@@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::Backend;
-pub(super) use crate::pb::deckconfig_service::Service as DeckConfigService;
+pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService;
use crate::{
    deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsRequest},
    pb,
@@ -10,19 +10,22 @@ use crate::{
};

impl DeckConfigService for Backend {
-    fn add_or_update_deck_config_legacy(&self, input: pb::Json) -> Result<pb::DeckConfigId> {
+    fn add_or_update_deck_config_legacy(
+        &self,
+        input: pb::generic::Json,
+    ) -> Result<pb::deckconfig::DeckConfigId> {
        let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?;
        let mut conf: DeckConfig = conf.into();
        self.with_col(|col| {
            col.transact_no_undo(|col| {
                col.add_or_update_deck_config_legacy(&mut conf)?;
-                Ok(pb::DeckConfigId { dcid: conf.id.0 })
+                Ok(pb::deckconfig::DeckConfigId { dcid: conf.id.0 })
            })
        })
        .map(Into::into)
    }

-    fn all_deck_config_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
+    fn all_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let conf: Vec<DeckConfSchema11> = col
                .storage
@@ -35,11 +38,17 @@ impl DeckConfigService for Backend {
            .map(Into::into)
    }

-    fn get_deck_config(&self, input: pb::DeckConfigId) -> Result<pb::DeckConfig> {
+    fn get_deck_config(
+        &self,
+        input: pb::deckconfig::DeckConfigId,
+    ) -> Result<pb::deckconfig::DeckConfig> {
        self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into()))
    }

-    fn get_deck_config_legacy(&self, input: pb::DeckConfigId) -> Result<pb::Json> {
+    fn get_deck_config_legacy(
+        &self,
+        input: pb::deckconfig::DeckConfigId,
+    ) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let conf = col.get_deck_config(input.into(), true)?.unwrap();
            let conf: DeckConfSchema11 = conf.into();
@@ -48,30 +57,39 @@
            .map(Into::into)
    }

-    fn new_deck_config_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
+    fn new_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
        serde_json::to_vec(&DeckConfSchema11::default())
            .map_err(Into::into)
            .map(Into::into)
    }

-    fn remove_deck_config(&self, input: pb::DeckConfigId) -> Result<pb::Empty> {
+    fn remove_deck_config(
+        &self,
+        input: pb::deckconfig::DeckConfigId,
+    ) -> Result<pb::generic::Empty> {
        self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into())))
            .map(Into::into)
    }

-    fn get_deck_configs_for_update(&self, input: pb::DeckId) -> Result<pb::DeckConfigsForUpdate> {
+    fn get_deck_configs_for_update(
+        &self,
+        input: pb::decks::DeckId,
+    ) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
        self.with_col(|col| col.get_deck_configs_for_update(input.into()))
    }

-    fn update_deck_configs(&self, input: pb::UpdateDeckConfigsRequest) -> Result<pb::OpChanges> {
+    fn update_deck_configs(
+        &self,
+        input: pb::deckconfig::UpdateDeckConfigsRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.update_deck_configs(input.into()))
            .map(Into::into)
    }
}

-impl From<DeckConfig> for pb::DeckConfig {
+impl From<DeckConfig> for pb::deckconfig::DeckConfig {
    fn from(c: DeckConfig) -> Self {
-        pb::DeckConfig {
+        pb::deckconfig::DeckConfig {
            id: c.id.0,
            name: c.name,
            mtime_secs: c.mtime_secs.0,
@@ -81,8 +99,8 @@ impl From<DeckConfig> for pb::DeckConfig {
    }
}

-impl From<pb::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
-    fn from(c: pb::UpdateDeckConfigsRequest) -> Self {
+impl From<pb::deckconfig::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
+    fn from(c: pb::deckconfig::UpdateDeckConfigsRequest) -> Self {
        UpdateDeckConfigsRequest {
            target_deck_id: c.target_deck_id.into(),
            configs: c.configs.into_iter().map(Into::into).collect(),
@@ -94,8 +112,8 @@ impl From<pb::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
    }
}

-impl From<pb::DeckConfig> for DeckConfig {
-    fn from(c: pb::DeckConfig) -> Self {
+impl From<pb::deckconfig::DeckConfig> for DeckConfig {
+    fn from(c: pb::deckconfig::DeckConfig) -> Self {
        DeckConfig {
            id: c.id.into(),
            name: c.name,

@@ -4,7 +4,7 @@
use std::convert::TryFrom;

use super::Backend;
-pub(super) use crate::pb::decks_service::Service as DecksService;
+pub(super) use crate::pb::decks::decks_service::Service as DecksService;
use crate::{
    decks::{DeckSchema11, FilteredSearchOrder},
    pb::{self as pb},
@@ -13,16 +13,16 @@ use crate::{
};

impl DecksService for Backend {
-    fn new_deck(&self, _input: pb::Empty) -> Result<pb::Deck> {
+    fn new_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
        Ok(Deck::new_normal().into())
    }

-    fn add_deck(&self, deck: pb::Deck) -> Result<pb::OpChangesWithId> {
+    fn add_deck(&self, deck: pb::decks::Deck) -> Result<pb::collection::OpChangesWithId> {
        let mut deck: Deck = deck.try_into()?;
        self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into()))
    }

-    fn add_deck_legacy(&self, input: pb::Json) -> Result<pb::OpChangesWithId> {
+    fn add_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChangesWithId> {
        let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?;
        let mut deck: Deck = schema11.into();
        self.with_col(|col| {
@@ -33,8 +33,8 @@ impl DecksService for Backend {
    fn add_or_update_deck_legacy(
        &self,
-        input: pb::AddOrUpdateDeckLegacyRequest,
-    ) -> Result<pb::DeckId> {
+        input: pb::decks::AddOrUpdateDeckLegacyRequest,
+    ) -> Result<pb::decks::DeckId> {
        self.with_col(|col| {
            let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
            let mut deck: Deck = schema11.into();
@@ -46,11 +46,11 @@
            } else {
                col.add_or_update_deck(&mut deck)?;
            }
-            Ok(pb::DeckId { did: deck.id.0 })
+            Ok(pb::decks::DeckId { did: deck.id.0 })
        })
    }

-    fn deck_tree(&self, input: pb::DeckTreeRequest) -> Result<pb::DeckTreeNode> {
+    fn deck_tree(&self, input: pb::decks::DeckTreeRequest) -> Result<pb::decks::DeckTreeNode> {
        self.with_col(|col| {
            let now = if input.now == 0 {
                None
@@ -61,7 +61,7 @@
        })
    }

-    fn deck_tree_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
+    fn deck_tree_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let tree = col.legacy_deck_tree()?;
            serde_json::to_vec(&tree)
@@ -70,7 +70,7 @@
        })
    }

-    fn get_all_decks_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
+    fn get_all_decks_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
        self.with_col(|col| {
            let decks = col.storage.get_all_decks_as_schema11()?;
            serde_json::to_vec(&decks).map_err(Into::into)
@@ -78,26 +78,28 @@
            .map(Into::into)
    }

-    fn get_deck_id_by_name(&self, input: pb::String) -> Result<pb::DeckId> {
+    fn get_deck_id_by_name(&self, input: pb::generic::String) -> Result<pb::decks::DeckId> {
        self.with_col(|col| {
-            col.get_deck_id(&input.val)
-                .and_then(|d| d.or_not_found(input.val).map(|d| pb::DeckId { did: d.0 }))
+            col.get_deck_id(&input.val).and_then(|d| {
+                d.or_not_found(input.val)
+                    .map(|d| pb::decks::DeckId { did: d.0 })
+            })
        })
    }

-    fn get_deck(&self, input: pb::DeckId) -> Result<pb::Deck> {
+    fn get_deck(&self, input: pb::decks::DeckId) -> Result<pb::decks::Deck> {
        let did = input.into();
        self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into()))
    }

-    fn update_deck(&self, input: pb::Deck) -> Result<pb::OpChanges> {
+    fn update_deck(&self, input: pb::decks::Deck) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| {
            let mut deck = Deck::try_from(input)?;
            col.update_deck(&mut deck).map(Into::into)
        })
    }

-    fn update_deck_legacy(&self, input: pb::Json) -> Result<pb::OpChanges> {
+    fn update_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| {
            let deck: DeckSchema11 = serde_json::from_slice(&input.json)?;
            let mut deck = deck.into();
@@ -105,7 +107,7 @@
        })
    }

-    fn get_deck_legacy(&self, input: pb::DeckId) -> Result<pb::Json> {
+    fn get_deck_legacy(&self, input: pb::decks::DeckId) -> Result<pb::generic::Json> {
        let did = input.into();
        self.with_col(|col| {
            let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into();
@@ -115,7 +117,10 @@
        })
    }

-    fn get_deck_names(&self, input: pb::GetDeckNamesRequest) -> Result<pb::DeckNames> {
+    fn get_deck_names(
+        &self,
+        input: pb::decks::GetDeckNamesRequest,
+    ) -> Result<pb::decks::DeckNames> {
        self.with_col(|col| {
            let names = if input.include_filtered {
                col.get_all_deck_names(input.skip_empty_default)?
@@ -126,14 +131,14 @@
        })
    }

-    fn get_deck_and_child_names(&self, input: pb::DeckId) -> Result<pb::DeckNames> {
+    fn get_deck_and_child_names(&self, input: pb::decks::DeckId) -> Result<pb::decks::DeckNames> {
        self.with_col(|col| {
            col.get_deck_and_child_names(input.did.into())
                .map(Into::into)
        })
    }

-    fn new_deck_legacy(&self, input: pb::Bool) -> Result<pb::Json> {
+    fn new_deck_legacy(&self, input: pb::generic::Bool) -> Result<pb::generic::Json> {
        let deck = if input.val {
            Deck::new_filtered()
        } else {
@@ -145,12 +150,18 @@
            .map(Into::into)
    }

-    fn remove_decks(&self, input: pb::DeckIds) -> Result<pb::OpChangesWithCount> {
+    fn remove_decks(
+        &self,
+        input: pb::decks::DeckIds,
+    ) -> Result<pb::collection::OpChangesWithCount> {
        self.with_col(|col| col.remove_decks_and_child_decks(&Into::<Vec<DeckId>>::into(input)))
            .map(Into::into)
    }

-    fn reparent_decks(&self, input: pb::ReparentDecksRequest) -> Result<pb::OpChangesWithCount> {
+    fn reparent_decks(
+        &self,
+        input: pb::decks::ReparentDecksRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
        let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
        let new_parent = if input.new_parent == 0 {
            None
@@ -161,68 +172,80 @@
            .map(Into::into)
    }

-    fn rename_deck(&self, input: pb::RenameDeckRequest) -> Result<pb::OpChanges> {
+    fn rename_deck(
+        &self,
+        input: pb::decks::RenameDeckRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name))
            .map(Into::into)
    }

-    fn get_or_create_filtered_deck(&self, input: pb::DeckId) -> Result<pb::FilteredDeckForUpdate> {
+    fn get_or_create_filtered_deck(
+        &self,
+        input: pb::decks::DeckId,
+    ) -> Result<pb::decks::FilteredDeckForUpdate> {
        self.with_col(|col| col.get_or_create_filtered_deck(input.into()))
            .map(Into::into)
    }

    fn add_or_update_filtered_deck(
        &self,
-        input: pb::FilteredDeckForUpdate,
-    ) -> Result<pb::OpChangesWithId> {
+        input: pb::decks::FilteredDeckForUpdate,
+    ) -> Result<pb::collection::OpChangesWithId> {
        self.with_col(|col| col.add_or_update_filtered_deck(input.into()))
            .map(|out| out.map(i64::from))
            .map(Into::into)
    }

-    fn filtered_deck_order_labels(&self, _input: pb::Empty) -> Result<pb::StringList> {
+    fn filtered_deck_order_labels(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::generic::StringList> {
        Ok(FilteredSearchOrder::labels(&self.tr).into())
    }

-    fn set_deck_collapsed(&self, input: pb::SetDeckCollapsedRequest) -> Result<pb::OpChanges> {
+    fn set_deck_collapsed(
+        &self,
+        input: pb::decks::SetDeckCollapsedRequest,
+    ) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| {
            col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
        })
        .map(Into::into)
    }

-    fn set_current_deck(&self, input: pb::DeckId) -> Result<pb::OpChanges> {
+    fn set_current_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
        self.with_col(|col| col.set_current_deck(input.did.into()))
            .map(Into::into)
    }

-    fn get_current_deck(&self, _input: pb::Empty) -> Result<pb::Deck> {
+    fn get_current_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
        self.with_col(|col| col.get_current_deck())
            .map(|deck| (*deck).clone().into())
    }
}

-impl From<pb::DeckId> for DeckId {
-    fn from(did: pb::DeckId) -> Self {
+impl From<pb::decks::DeckId> for DeckId {
+    fn from(did: pb::decks::DeckId) -> Self {
        DeckId(did.did)
    }
}

-impl From<pb::DeckIds> for Vec<DeckId> {
-    fn from(dids: pb::DeckIds) -> Self {
+impl From<pb::decks::DeckIds> for Vec<DeckId> {
+    fn from(dids: pb::decks::DeckIds) -> Self {
        dids.dids.into_iter().map(DeckId).collect()
    }
}

-impl From<DeckId> for pb::DeckId {
+impl From<DeckId> for pb::decks::DeckId {
    fn from(did: DeckId) -> Self {
-        pb::DeckId { did: did.0 }
+        pb::decks::DeckId { did: did.0 }
    }
}

-impl From<FilteredDeckForUpdate> for pb::FilteredDeckForUpdate {
+impl From<FilteredDeckForUpdate> for pb::decks::FilteredDeckForUpdate {
    fn from(deck: FilteredDeckForUpdate) -> Self {
-        pb::FilteredDeckForUpdate {
+        pb::decks::FilteredDeckForUpdate {
            id: deck.id.into(),
            name: deck.human_name,
            config: Some(deck.config),
@@ -230,8 +253,8 @@ impl From<FilteredDeckForUpdate> for pb::FilteredDeckForUpdate {
    }
}

-impl From<pb::FilteredDeckForUpdate> for FilteredDeckForUpdate {
-    fn from(deck: pb::FilteredDeckForUpdate) -> Self {
+impl From<pb::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
+    fn from(deck: pb::decks::FilteredDeckForUpdate) -> Self {
        FilteredDeckForUpdate {
            id: deck.id.into(),
            human_name: deck.name,
@@ -240,9 +263,9 @@ impl From<pb::FilteredDeckForUpdate> for FilteredDeckForUpdate {
    }
}

-impl From<Deck> for pb::Deck {
+impl From<Deck> for pb::decks::Deck {
    fn from(d: Deck) -> Self {
-        pb::Deck {
+        pb::decks::Deck {
            id: d.id.0,
            name: d.name.human_name(),
            mtime_secs: d.mtime_secs.0,
@@ -253,10 +276,10 @@ impl From<Deck> for pb::Deck {
    }
}

-impl TryFrom<pb::Deck> for Deck {
+impl TryFrom<pb::decks::Deck> for Deck {
    type Error = AnkiError;

-    fn try_from(d: pb::Deck) -> Result<Self, Self::Error> {
+    fn try_from(d: pb::decks::Deck) -> Result<Self, Self::Error> {
        Ok(Deck {
            id: DeckId(d.id),
            name: NativeDeckName::from_human_name(&d.name),
@@ -268,42 +291,42 @@ impl TryFrom<pb::Deck> for Deck {
    }
}

-impl From<DeckKind> for pb::deck::Kind {
+impl From<DeckKind> for pb::decks::deck::Kind {
    fn from(k: DeckKind) -> Self {
        match k {
-            DeckKind::Normal(n) => pb::deck::Kind::Normal(n),
-            DeckKind::Filtered(f) => pb::deck::Kind::Filtered(f),
+            DeckKind::Normal(n) => pb::decks::deck::Kind::Normal(n),
+            DeckKind::Filtered(f) => pb::decks::deck::Kind::Filtered(f),
        }
    }
}

-impl From<pb::deck::Kind> for DeckKind {
-    fn from(kind: pb::deck::Kind) -> Self {
+impl From<pb::decks::deck::Kind> for DeckKind {
+    fn from(kind: pb::decks::deck::Kind) -> Self {
        match kind {
-            pb::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
-            pb::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
+            pb::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
+            pb::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
        }
    }
}

-impl From<(DeckId, String)> for pb::DeckNameId {
+impl From<(DeckId, String)> for pb::decks::DeckNameId {
    fn from(id_name: (DeckId, String)) -> Self {
-        pb::DeckNameId {
+        pb::decks::DeckNameId {
            id: id_name.0 .0,
            name: id_name.1,
        }
    }
}

-impl From<Vec<(DeckId, String)>> for pb::DeckNames {
+impl From<Vec<(DeckId, String)>> for pb::decks::DeckNames {
    fn from(id_names: Vec<(DeckId, String)>) -> Self {
-        pb::DeckNames {
+        pb::decks::DeckNames {
            entries: id_names.into_iter().map(Into::into).collect(),
        }
    }
}

-// fn new_deck(&self, input: pb::Bool) -> Result<pb::Deck> {
+// fn new_deck(&self, input: pb::generic::Bool) -> Result<pb::decks::Deck> {
//     let deck = if input.val {
//         Deck::new_filtered()
//     } else {

@@ -4,12 +4,12 @@
use crate::{
    error::{AnkiError, SyncErrorKind},
    pb,
-    pb::backend_error::Kind,
+    pb::backend::backend_error::Kind,
    prelude::*,
};

impl AnkiError {
-    pub fn into_protobuf(self, tr: &I18n) -> pb::BackendError {
+    pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError {
        let message = self.message(tr);
        let help_page = self.help_page().map(|page| page as i32);
        let context = self.context();
@@ -43,7 +43,7 @@ impl AnkiError {
            AnkiError::InvalidId => Kind::InvalidInput,
        };

-        pb::BackendError {
+        pb::backend::BackendError {
            kind: kind as i32,
            message,
            help_page,

@@ -3,98 +3,98 @@

use crate::{pb, prelude::*};

-impl From<Vec<u8>> for pb::Json {
+impl From<Vec<u8>> for pb::generic::Json {
    fn from(json: Vec<u8>) -> Self {
-        pb::Json { json }
+        pb::generic::Json { json }
    }
}

-impl From<String> for pb::String {
+impl From<String> for pb::generic::String {
    fn from(val: String) -> Self {
-        pb::String { val }
+        pb::generic::String { val }
    }
}

-impl From<bool> for pb::Bool {
+impl From<bool> for pb::generic::Bool {
    fn from(val: bool) -> Self {
-        pb::Bool { val }
+        pb::generic::Bool { val }
    }
}

-impl From<i32> for pb::Int32 {
+impl From<i32> for pb::generic::Int32 {
    fn from(val: i32) -> Self {
-        pb::Int32 { val }
+        pb::generic::Int32 { val }
    }
}

-impl From<i64> for pb::Int64 {
+impl From<i64> for pb::generic::Int64 {
    fn from(val: i64) -> Self {
-        pb::Int64 { val }
+        pb::generic::Int64 { val }
    }
}

-impl From<u32> for pb::UInt32 {
+impl From<u32> for pb::generic::UInt32 {
    fn from(val: u32) -> Self {
-        pb::UInt32 { val }
+        pb::generic::UInt32 { val }
    }
}

-impl From<usize> for pb::UInt32 {
+impl From<usize> for pb::generic::UInt32 {
    fn from(val: usize) -> Self {
-        pb::UInt32 { val: val as u32 }
+        pb::generic::UInt32 { val: val as u32 }
    }
}

-impl From<()> for pb::Empty {
+impl From<()> for pb::generic::Empty {
    fn from(_val: ()) -> Self {
-        pb::Empty {}
+        pb::generic::Empty {}
    }
}

-impl From<pb::CardId> for CardId {
-    fn from(cid: pb::CardId) -> Self {
+impl From<pb::cards::CardId> for CardId {
+    fn from(cid: pb::cards::CardId) -> Self {
        CardId(cid.cid)
    }
}

-impl From<pb::CardIds> for Vec<CardId> {
-    fn from(c: pb::CardIds) -> Self {
+impl From<pb::cards::CardIds> for Vec<CardId> {
+    fn from(c: pb::cards::CardIds) -> Self {
        c.cids.into_iter().map(CardId).collect()
    }
}

-impl From<pb::NoteId> for NoteId {
-    fn from(nid: pb::NoteId) -> Self {
+impl From<pb::notes::NoteId> for NoteId {
+    fn from(nid: pb::notes::NoteId) -> Self {
        NoteId(nid.nid)
    }
}

-impl From<NoteId> for pb::NoteId {
+impl From<NoteId> for pb::notes::NoteId {
    fn from(nid: NoteId) -> Self {
-        pb::NoteId { nid: nid.0 }
+        pb::notes::NoteId { nid: nid.0 }
    }
}

-impl From<pb::NotetypeId> for NotetypeId {
-    fn from(ntid: pb::NotetypeId) -> Self {
+impl From<pb::notetypes::NotetypeId> for NotetypeId {
+    fn from(ntid: pb::notetypes::NotetypeId) -> Self {
        NotetypeId(ntid.ntid)
    }
}

-impl From<NotetypeId> for pb::NotetypeId {
+impl From<NotetypeId> for pb::notetypes::NotetypeId {
    fn from(ntid: NotetypeId) -> Self {
-        pb::NotetypeId { ntid: ntid.0 }
+        pb::notetypes::NotetypeId { ntid: ntid.0 }
    }
}

-impl From<pb::DeckConfigId> for DeckConfigId {
-    fn from(dcid: pb::DeckConfigId) -> Self {
+impl From<pb::deckconfig::DeckConfigId> for DeckConfigId {
+    fn from(dcid: pb::deckconfig::DeckConfigId) -> Self {
        DeckConfigId(dcid.dcid)
    }
}

-impl From<Vec<String>> for pb::StringList {
+impl From<Vec<String>> for pb::generic::StringList {
    fn from(vals: Vec<String>) -> Self {
-        pb::StringList { vals }
+        pb::generic::StringList { vals }
    }
}

View file

@ -6,7 +6,7 @@ use std::collections::HashMap;
use fluent::{FluentArgs, FluentValue}; use fluent::{FluentArgs, FluentValue};
use super::Backend; use super::Backend;
pub(super) use crate::pb::i18n_service::Service as I18nService; pub(super) use crate::pb::i18n::i18n_service::Service as I18nService;
use crate::{ use crate::{
pb, pb,
prelude::*, prelude::*,
@ -14,7 +14,10 @@ use crate::{
}; };
impl I18nService for Backend { impl I18nService for Backend {
fn translate_string(&self, input: pb::TranslateStringRequest) -> Result<pb::String> { fn translate_string(
&self,
input: pb::i18n::TranslateStringRequest,
) -> Result<pb::generic::String> {
let args = build_fluent_args(input.args); let args = build_fluent_args(input.args);
Ok(self Ok(self
@ -27,8 +30,11 @@ impl I18nService for Backend {
.into()) .into())
} }
fn format_timespan(&self, input: pb::FormatTimespanRequest) -> Result<pb::String> { fn format_timespan(
use pb::format_timespan_request::Context; &self,
input: pb::i18n::FormatTimespanRequest,
) -> Result<pb::generic::String> {
use pb::i18n::format_timespan_request::Context;
Ok(match input.context() { Ok(match input.context() {
Context::Precise => time_span(input.seconds, &self.tr, true), Context::Precise => time_span(input.seconds, &self.tr, true),
Context::Intervals => time_span(input.seconds, &self.tr, false), Context::Intervals => time_span(input.seconds, &self.tr, false),
@ -37,14 +43,14 @@ impl I18nService for Backend {
.into()) .into())
} }
fn i18n_resources(&self, input: pb::I18nResourcesRequest) -> Result<pb::Json> { fn i18n_resources(&self, input: pb::i18n::I18nResourcesRequest) -> Result<pb::generic::Json> {
serde_json::to_vec(&self.tr.resources_for_js(&input.modules)) serde_json::to_vec(&self.tr.resources_for_js(&input.modules))
.map(Into::into) .map(Into::into)
.map_err(Into::into) .map_err(Into::into)
} }
} }
fn build_fluent_args(input: HashMap<String, pb::TranslateArgValue>) -> FluentArgs<'static> { fn build_fluent_args(input: HashMap<String, pb::i18n::TranslateArgValue>) -> FluentArgs<'static> {
let mut args = FluentArgs::new(); let mut args = FluentArgs::new();
for (key, val) in input { for (key, val) in input {
args.set(key, translate_arg_to_fluent_val(&val)); args.set(key, translate_arg_to_fluent_val(&val));
@ -52,8 +58,8 @@ fn build_fluent_args(input: HashMap<String, pb::TranslateArgValue>) -> FluentArg
args args
} }
fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue<'static> { fn translate_arg_to_fluent_val(arg: &pb::i18n::TranslateArgValue) -> FluentValue<'static> {
use pb::translate_arg_value::Value as V; use pb::i18n::translate_arg_value::Value as V;
match &arg.value { match &arg.value {
Some(val) => match val { Some(val) => match val {
V::Str(s) => FluentValue::String(s.to_owned().into()), V::Str(s) => FluentValue::String(s.to_owned().into()),
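
For context on the two helpers above: every request argument is turned into a FluentValue and collected into FluentArgs before translation. The sketch below reproduces that flow and should compile with the fluent crate as a dependency; TranslateArgValue and its Value oneof are simplified stand-ins for the generated pb::i18n types.

use std::collections::HashMap;

use fluent::{FluentArgs, FluentValue};

// Simplified stand-in for pb::i18n::TranslateArgValue and its oneof.
enum Value {
    Str(String),
    Number(f64),
}

struct TranslateArgValue {
    value: Option<Value>,
}

fn translate_arg_to_fluent_val(arg: &TranslateArgValue) -> FluentValue<'static> {
    match &arg.value {
        Some(Value::Str(s)) => FluentValue::String(s.to_owned().into()),
        Some(Value::Number(n)) => FluentValue::Number((*n).into()),
        // Treat a missing value as an empty string rather than erroring.
        None => FluentValue::String("".into()),
    }
}

fn build_fluent_args(input: HashMap<String, TranslateArgValue>) -> FluentArgs<'static> {
    let mut args = FluentArgs::new();
    for (key, val) in input {
        args.set(key, translate_arg_to_fluent_val(&val));
    }
    args
}

fn main() {
    let mut input = HashMap::new();
    input.insert(
        "count".to_string(),
        TranslateArgValue {
            value: Some(Value::Number(3.0)),
        },
    );
    input.insert(
        "name".to_string(),
        TranslateArgValue {
            value: Some(Value::Str("Basic".into())),
        },
    );
    let _args = build_fluent_args(input);
}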

View file

@ -4,10 +4,13 @@
use std::path::Path; use std::path::Path;
use super::{progress::Progress, Backend}; use super::{progress::Progress, Backend};
pub(super) use crate::pb::importexport_service::Service as ImportExportService; pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService;
use crate::{ use crate::{
import_export::{package::import_colpkg, ExportProgress, ImportProgress, NoteLog}, import_export::{package::import_colpkg, ExportProgress, ImportProgress, NoteLog},
pb::{self as pb, export_limit, ExportLimit}, pb::{
import_export::{export_limit, ExportLimit},
{self as pb},
},
prelude::*, prelude::*,
search::SearchNode, search::SearchNode,
}; };
@ -15,8 +18,8 @@ use crate::{
impl ImportExportService for Backend { impl ImportExportService for Backend {
fn export_collection_package( fn export_collection_package(
&self, &self,
input: pb::ExportCollectionPackageRequest, input: pb::import_export::ExportCollectionPackageRequest,
) -> Result<pb::Empty> { ) -> Result<pb::generic::Empty> {
self.abort_media_sync_and_wait(); self.abort_media_sync_and_wait();
let mut guard = self.lock_open_collection()?; let mut guard = self.lock_open_collection()?;
@ -34,8 +37,8 @@ impl ImportExportService for Backend {
fn import_collection_package( fn import_collection_package(
&self, &self,
input: pb::ImportCollectionPackageRequest, input: pb::import_export::ImportCollectionPackageRequest,
) -> Result<pb::Empty> { ) -> Result<pb::generic::Empty> {
let _guard = self.lock_closed_collection()?; let _guard = self.lock_closed_collection()?;
import_colpkg( import_colpkg(
@ -51,13 +54,16 @@ impl ImportExportService for Backend {
fn import_anki_package( fn import_anki_package(
&self, &self,
input: pb::ImportAnkiPackageRequest, input: pb::import_export::ImportAnkiPackageRequest,
) -> Result<pb::ImportResponse> { ) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn())) self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn()))
.map(Into::into) .map(Into::into)
} }
fn export_anki_package(&self, input: pb::ExportAnkiPackageRequest) -> Result<pb::UInt32> { fn export_anki_package(
&self,
input: pb::import_export::ExportAnkiPackageRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| { self.with_col(|col| {
col.export_apkg( col.export_apkg(
&input.out_path, &input.out_path,
@ -72,7 +78,10 @@ impl ImportExportService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_csv_metadata(&self, input: pb::CsvMetadataRequest) -> Result<pb::CsvMetadata> { fn get_csv_metadata(
&self,
input: pb::import_export::CsvMetadataRequest,
) -> Result<pb::import_export::CsvMetadata> {
let delimiter = input.delimiter.is_some().then(|| input.delimiter()); let delimiter = input.delimiter.is_some().then(|| input.delimiter());
self.with_col(|col| { self.with_col(|col| {
col.get_csv_metadata( col.get_csv_metadata(
@ -84,7 +93,10 @@ impl ImportExportService for Backend {
}) })
} }
fn import_csv(&self, input: pb::ImportCsvRequest) -> Result<pb::ImportResponse> { fn import_csv(
&self,
input: pb::import_export::ImportCsvRequest,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| { self.with_col(|col| {
col.import_csv( col.import_csv(
&input.path, &input.path,
@ -95,12 +107,18 @@ impl ImportExportService for Backend {
.map(Into::into) .map(Into::into)
} }
fn export_note_csv(&self, input: pb::ExportNoteCsvRequest) -> Result<pb::UInt32> { fn export_note_csv(
&self,
input: pb::import_export::ExportNoteCsvRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| col.export_note_csv(input, self.export_progress_fn())) self.with_col(|col| col.export_note_csv(input, self.export_progress_fn()))
.map(Into::into) .map(Into::into)
} }
fn export_card_csv(&self, input: pb::ExportCardCsvRequest) -> Result<pb::UInt32> { fn export_card_csv(
&self,
input: pb::import_export::ExportCardCsvRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| { self.with_col(|col| {
col.export_card_csv( col.export_card_csv(
&input.out_path, &input.out_path,
@ -112,12 +130,18 @@ impl ImportExportService for Backend {
.map(Into::into) .map(Into::into)
} }
fn import_json_file(&self, input: pb::String) -> Result<pb::ImportResponse> { fn import_json_file(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn())) self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn()))
.map(Into::into) .map(Into::into)
} }
fn import_json_string(&self, input: pb::String) -> Result<pb::ImportResponse> { fn import_json_string(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn())) self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn()))
.map(Into::into) .map(Into::into)
} }
@ -135,7 +159,7 @@ impl Backend {
} }
} }
impl From<OpOutput<NoteLog>> for pb::ImportResponse { impl From<OpOutput<NoteLog>> for pb::import_export::ImportResponse {
fn from(output: OpOutput<NoteLog>) -> Self { fn from(output: OpOutput<NoteLog>) -> Self {
Self { Self {
changes: Some(output.changes.into()), changes: Some(output.changes.into()),
@ -149,7 +173,7 @@ impl From<ExportLimit> for SearchNode {
use export_limit::Limit; use export_limit::Limit;
let limit = export_limit let limit = export_limit
.limit .limit
.unwrap_or(Limit::WholeCollection(pb::Empty {})); .unwrap_or(Limit::WholeCollection(pb::generic::Empty {}));
match limit { match limit {
Limit::WholeCollection(_) => Self::WholeCollection, Limit::WholeCollection(_) => Self::WholeCollection,
Limit::DeckId(did) => Self::from_deck_id(did, true), Limit::DeckId(did) => Self::from_deck_id(did, true),
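
The final hunk above maps the ExportLimit oneof onto a search node, defaulting to the whole collection when the field is unset. A small self-contained sketch of that fallback pattern follows, using simplified stand-ins for ExportLimit, Limit, and SearchNode rather than the real types.

#[derive(Debug)]
enum Limit {
    WholeCollection,
    DeckId(i64),
}

#[derive(Debug, Default)]
struct ExportLimit {
    limit: Option<Limit>,
}

#[derive(Debug, PartialEq)]
enum SearchNode {
    WholeCollection,
    Deck(i64),
}

impl From<ExportLimit> for SearchNode {
    fn from(export_limit: ExportLimit) -> Self {
        // An unset oneof falls back to exporting the whole collection.
        match export_limit.limit.unwrap_or(Limit::WholeCollection) {
            Limit::WholeCollection => SearchNode::WholeCollection,
            Limit::DeckId(did) => SearchNode::Deck(did),
        }
    }
}

fn main() {
    assert_eq!(
        SearchNode::from(ExportLimit::default()),
        SearchNode::WholeCollection
    );
    assert_eq!(
        SearchNode::from(ExportLimit {
            limit: Some(Limit::DeckId(1))
        }),
        SearchNode::Deck(1)
    );
}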

View file

@ -2,11 +2,11 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend; use super::Backend;
pub(super) use crate::pb::links_service::Service as LinksService; pub(super) use crate::pb::links::links_service::Service as LinksService;
use crate::{pb, pb::links::help_page_link_request::HelpPage, prelude::*}; use crate::{pb, pb::links::help_page_link_request::HelpPage, prelude::*};
impl LinksService for Backend { impl LinksService for Backend {
fn help_page_link(&self, input: pb::HelpPageLinkRequest) -> Result<pb::String> { fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result<pb::generic::String> {
Ok(HelpPage::from_i32(input.page) Ok(HelpPage::from_i32(input.page)
.unwrap_or(HelpPage::Index) .unwrap_or(HelpPage::Index)
.to_link() .to_link()

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{progress::Progress, Backend}; use super::{progress::Progress, Backend};
pub(super) use crate::pb::media_service::Service as MediaService; pub(super) use crate::pb::media::media_service::Service as MediaService;
use crate::{ use crate::{
media::{check::MediaChecker, MediaManager}, media::{check::MediaChecker, MediaManager},
pb, pb,
@ -13,7 +13,7 @@ impl MediaService for Backend {
// media // media
//----------------------------------------------- //-----------------------------------------------
fn check_media(&self, _input: pb::Empty) -> Result<pb::CheckMediaResponse> { fn check_media(&self, _input: pb::generic::Empty) -> Result<pb::media::CheckMediaResponse> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -26,7 +26,7 @@ impl MediaService for Backend {
let mut report = checker.summarize_output(&mut output); let mut report = checker.summarize_output(&mut output);
ctx.report_media_field_referencing_templates(&mut report)?; ctx.report_media_field_referencing_templates(&mut report)?;
Ok(pb::CheckMediaResponse { Ok(pb::media::CheckMediaResponse {
unused: output.unused, unused: output.unused,
missing: output.missing, missing: output.missing,
report, report,
@ -36,7 +36,10 @@ impl MediaService for Backend {
}) })
} }
fn trash_media_files(&self, input: pb::TrashMediaFilesRequest) -> Result<pb::Empty> { fn trash_media_files(
&self,
input: pb::media::TrashMediaFilesRequest,
) -> Result<pb::generic::Empty> {
self.with_col(|col| { self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx(); let mut ctx = mgr.dbctx();
@ -45,7 +48,7 @@ impl MediaService for Backend {
.map(Into::into) .map(Into::into)
} }
fn add_media_file(&self, input: pb::AddMediaFileRequest) -> Result<pb::String> { fn add_media_file(&self, input: pb::media::AddMediaFileRequest) -> Result<pb::generic::String> {
self.with_col(|col| { self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?; let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx(); let mut ctx = mgr.dbctx();
@ -56,7 +59,7 @@ impl MediaService for Backend {
}) })
} }
fn empty_trash(&self, _input: pb::Empty) -> Result<pb::Empty> { fn empty_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -69,7 +72,7 @@ impl MediaService for Backend {
.map(Into::into) .map(Into::into)
} }
fn restore_trash(&self, _input: pb::Empty) -> Result<pb::Empty> { fn restore_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
let mut handler = self.new_progress_handler(); let mut handler = self.new_progress_handler();
let progress_fn = let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true); move |progress| handler.update(Progress::MediaCheck(progress as u32), true);

View file

@ -38,7 +38,9 @@ use once_cell::sync::OnceCell;
use progress::AbortHandleSlot; use progress::AbortHandleSlot;
use prost::Message; use prost::Message;
use slog::Logger; use slog::Logger;
use tokio::runtime::{self, Runtime}; use tokio::runtime::{
Runtime, {self},
};
use self::{ use self::{
card::CardsService, card::CardsService,
@ -60,7 +62,7 @@ use self::{
sync::{SyncService, SyncState}, sync::{SyncService, SyncState},
tags::TagsService, tags::TagsService,
}; };
use crate::{backend::dbproxy::db_command_bytes, log, pb, prelude::*}; use crate::{backend::dbproxy::db_command_bytes, log, pb, pb::backend::ServiceIndex, prelude::*};
pub struct Backend { pub struct Backend {
col: Arc<Mutex<Option<Collection>>>, col: Arc<Mutex<Option<Collection>>>,
@ -79,8 +81,8 @@ struct BackendState {
sync: SyncState, sync: SyncState,
} }
pub fn init_backend(init_msg: &[u8], log: Option<Logger>) -> std::result::Result<Backend, String> { pub fn init_backend(init_msg: &[u8], log: Option<Logger>) -> result::Result<Backend, String> {
let input: pb::BackendInit = match pb::BackendInit::decode(init_msg) { let input: pb::backend::BackendInit = match pb::backend::BackendInit::decode(init_msg) {
Ok(req) => req, Ok(req) => req,
Err(_) => return Err("couldn't decode init request".into()), Err(_) => return Err("couldn't decode init request".into()),
}; };
@ -119,30 +121,28 @@ impl Backend {
method: u32, method: u32,
input: &[u8], input: &[u8],
) -> result::Result<Vec<u8>, Vec<u8>> { ) -> result::Result<Vec<u8>, Vec<u8>> {
pb::ServiceIndex::from_i32(service as i32) ServiceIndex::from_i32(service as i32)
.or_invalid("invalid service") .or_invalid("invalid service")
.and_then(|service| match service { .and_then(|service| match service {
pb::ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input), ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input),
pb::ServiceIndex::Decks => DecksService::run_method(self, method, input), ServiceIndex::Decks => DecksService::run_method(self, method, input),
pb::ServiceIndex::Notes => NotesService::run_method(self, method, input), ServiceIndex::Notes => NotesService::run_method(self, method, input),
pb::ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input), ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input),
pb::ServiceIndex::Config => ConfigService::run_method(self, method, input), ServiceIndex::Config => ConfigService::run_method(self, method, input),
pb::ServiceIndex::Sync => SyncService::run_method(self, method, input), ServiceIndex::Sync => SyncService::run_method(self, method, input),
pb::ServiceIndex::Tags => TagsService::run_method(self, method, input), ServiceIndex::Tags => TagsService::run_method(self, method, input),
pb::ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input), ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input),
pb::ServiceIndex::CardRendering => { ServiceIndex::CardRendering => {
CardRenderingService::run_method(self, method, input) CardRenderingService::run_method(self, method, input)
} }
pb::ServiceIndex::Media => MediaService::run_method(self, method, input), ServiceIndex::Media => MediaService::run_method(self, method, input),
pb::ServiceIndex::Stats => StatsService::run_method(self, method, input), ServiceIndex::Stats => StatsService::run_method(self, method, input),
pb::ServiceIndex::Search => SearchService::run_method(self, method, input), ServiceIndex::Search => SearchService::run_method(self, method, input),
pb::ServiceIndex::I18n => I18nService::run_method(self, method, input), ServiceIndex::I18n => I18nService::run_method(self, method, input),
pb::ServiceIndex::Links => LinksService::run_method(self, method, input), ServiceIndex::Links => LinksService::run_method(self, method, input),
pb::ServiceIndex::Collection => CollectionService::run_method(self, method, input), ServiceIndex::Collection => CollectionService::run_method(self, method, input),
pb::ServiceIndex::Cards => CardsService::run_method(self, method, input), ServiceIndex::Cards => CardsService::run_method(self, method, input),
pb::ServiceIndex::ImportExport => { ServiceIndex::ImportExport => ImportExportService::run_method(self, method, input),
ImportExportService::run_method(self, method, input)
}
}) })
.map_err(|err| { .map_err(|err| {
let backend_err = err.into_protobuf(&self.tr); let backend_err = err.into_protobuf(&self.tr);
@ -152,7 +152,7 @@ impl Backend {
}) })
} }
pub fn run_db_command_bytes(&self, input: &[u8]) -> std::result::Result<Vec<u8>, Vec<u8>> { pub fn run_db_command_bytes(&self, input: &[u8]) -> result::Result<Vec<u8>, Vec<u8>> {
self.db_command(input).map_err(|err| { self.db_command(input).map_err(|err| {
let backend_err = err.into_protobuf(&self.tr); let backend_err = err.into_protobuf(&self.tr);
let mut bytes = Vec::new(); let mut bytes = Vec::new();
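
The dispatcher above decodes the service index via the from_i32 helper that prost-build generates for proto enums, then forwards to the matching service trait. A rough dependency-free sketch of that shape follows; ServiceIndex is hand-rolled here, and Anki's or_invalid() helper is approximated with ok_or_else.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ServiceIndex {
    Scheduler = 0,
    Decks = 1,
}

impl ServiceIndex {
    // Stand-in for the from_i32 that prost-build generates for proto enums.
    fn from_i32(value: i32) -> Option<Self> {
        match value {
            0 => Some(ServiceIndex::Scheduler),
            1 => Some(ServiceIndex::Decks),
            _ => None,
        }
    }
}

fn run_service_method(service: u32, method: u32) -> Result<Vec<u8>, String> {
    ServiceIndex::from_i32(service as i32)
        .ok_or_else(|| "invalid service".to_string())
        .and_then(|service| match service {
            // In the real backend each arm calls Service::run_method(self, method, input).
            ServiceIndex::Scheduler => Ok(format!("scheduler/{method}").into_bytes()),
            ServiceIndex::Decks => Ok(format!("decks/{method}").into_bytes()),
        })
}

fn main() {
    assert!(run_service_method(0, 3).is_ok());
    assert!(run_service_method(99, 0).is_err());
}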

View file

@ -4,7 +4,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use super::Backend; use super::Backend;
pub(super) use crate::pb::notes_service::Service as NotesService; pub(super) use crate::pb::notes::notes_service::Service as NotesService;
use crate::{ use crate::{
cloze::add_cloze_numbers_in_string, cloze::add_cloze_numbers_in_string,
pb::{self as pb}, pb::{self as pb},
@ -12,7 +12,7 @@ use crate::{
}; };
impl NotesService for Backend { impl NotesService for Backend {
fn new_note(&self, input: pb::NotetypeId) -> Result<pb::Note> { fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notes::Note> {
let ntid = input.into(); let ntid = input.into();
self.with_col(|col| { self.with_col(|col| {
let nt = col.get_notetype(ntid)?.or_not_found(ntid)?; let nt = col.get_notetype(ntid)?.or_not_found(ntid)?;
@ -20,11 +20,11 @@ impl NotesService for Backend {
}) })
} }
fn add_note(&self, input: pb::AddNoteRequest) -> Result<pb::AddNoteResponse> { fn add_note(&self, input: pb::notes::AddNoteRequest) -> Result<pb::notes::AddNoteResponse> {
self.with_col(|col| { self.with_col(|col| {
let mut note: Note = input.note.or_invalid("no note provided")?.into(); let mut note: Note = input.note.or_invalid("no note provided")?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?; let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(pb::AddNoteResponse { Ok(pb::notes::AddNoteResponse {
note_id: note.id.0, note_id: note.id.0,
changes: Some(changes.into()), changes: Some(changes.into()),
}) })
@ -33,15 +33,18 @@ impl NotesService for Backend {
fn defaults_for_adding( fn defaults_for_adding(
&self, &self,
input: pb::DefaultsForAddingRequest, input: pb::notes::DefaultsForAddingRequest,
) -> Result<pb::DeckAndNotetype> { ) -> Result<pb::notes::DeckAndNotetype> {
self.with_col(|col| { self.with_col(|col| {
let home_deck: DeckId = input.home_deck_of_current_review_card.into(); let home_deck: DeckId = input.home_deck_of_current_review_card.into();
col.defaults_for_adding(home_deck).map(Into::into) col.defaults_for_adding(home_deck).map(Into::into)
}) })
} }
fn default_deck_for_notetype(&self, input: pb::NotetypeId) -> Result<pb::DeckId> { fn default_deck_for_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::decks::DeckId> {
self.with_col(|col| { self.with_col(|col| {
Ok(col Ok(col
.default_deck_for_notetype(input.into())? .default_deck_for_notetype(input.into())?
@ -50,7 +53,10 @@ impl NotesService for Backend {
}) })
} }
fn update_notes(&self, input: pb::UpdateNotesRequest) -> Result<pb::OpChanges> { fn update_notes(
&self,
input: pb::notes::UpdateNotesRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| { self.with_col(|col| {
let notes = input let notes = input
.notes .notes
@ -62,12 +68,15 @@ impl NotesService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_note(&self, input: pb::NoteId) -> Result<pb::Note> { fn get_note(&self, input: pb::notes::NoteId) -> Result<pb::notes::Note> {
let nid = input.into(); let nid = input.into();
self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into)) self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into))
} }
fn remove_notes(&self, input: pb::RemoveNotesRequest) -> Result<pb::OpChangesWithCount> { fn remove_notes(
&self,
input: pb::notes::RemoveNotesRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| { self.with_col(|col| {
if !input.note_ids.is_empty() { if !input.note_ids.is_empty() {
col.remove_notes( col.remove_notes(
@ -91,20 +100,23 @@ impl NotesService for Backend {
}) })
} }
fn cloze_numbers_in_note(&self, note: pb::Note) -> Result<pb::ClozeNumbersInNoteResponse> { fn cloze_numbers_in_note(
&self,
note: pb::notes::Note,
) -> Result<pb::notes::ClozeNumbersInNoteResponse> {
let mut set = HashSet::with_capacity(4); let mut set = HashSet::with_capacity(4);
for field in &note.fields { for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set); add_cloze_numbers_in_string(field, &mut set);
} }
Ok(pb::ClozeNumbersInNoteResponse { Ok(pb::notes::ClozeNumbersInNoteResponse {
numbers: set.into_iter().map(|n| n as u32).collect(), numbers: set.into_iter().map(|n| n as u32).collect(),
}) })
} }
fn after_note_updates( fn after_note_updates(
&self, &self,
input: pb::AfterNoteUpdatesRequest, input: pb::notes::AfterNoteUpdatesRequest,
) -> Result<pb::OpChangesWithCount> { ) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| { self.with_col(|col| {
col.after_note_updates( col.after_note_updates(
&to_note_ids(input.nids), &to_note_ids(input.nids),
@ -117,35 +129,41 @@ impl NotesService for Backend {
fn field_names_for_notes( fn field_names_for_notes(
&self, &self,
input: pb::FieldNamesForNotesRequest, input: pb::notes::FieldNamesForNotesRequest,
) -> Result<pb::FieldNamesForNotesResponse> { ) -> Result<pb::notes::FieldNamesForNotesResponse> {
self.with_col(|col| { self.with_col(|col| {
let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect(); let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
col.storage col.storage
.field_names_for_notes(&nids) .field_names_for_notes(&nids)
.map(|fields| pb::FieldNamesForNotesResponse { fields }) .map(|fields| pb::notes::FieldNamesForNotesResponse { fields })
}) })
} }
fn note_fields_check(&self, input: pb::Note) -> Result<pb::NoteFieldsCheckResponse> { fn note_fields_check(
&self,
input: pb::notes::Note,
) -> Result<pb::notes::NoteFieldsCheckResponse> {
let note: Note = input.into(); let note: Note = input.into();
self.with_col(|col| { self.with_col(|col| {
col.note_fields_check(&note) col.note_fields_check(&note)
.map(|r| pb::NoteFieldsCheckResponse { state: r as i32 }) .map(|r| pb::notes::NoteFieldsCheckResponse { state: r as i32 })
}) })
} }
fn cards_of_note(&self, input: pb::NoteId) -> Result<pb::CardIds> { fn cards_of_note(&self, input: pb::notes::NoteId) -> Result<pb::cards::CardIds> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.all_card_ids_of_note_in_template_order(NoteId(input.nid)) .all_card_ids_of_note_in_template_order(NoteId(input.nid))
.map(|v| pb::CardIds { .map(|v| pb::cards::CardIds {
cids: v.into_iter().map(Into::into).collect(), cids: v.into_iter().map(Into::into).collect(),
}) })
}) })
} }
fn get_single_notetype_of_notes(&self, input: pb::NoteIds) -> Result<pb::NotetypeId> { fn get_single_notetype_of_notes(
&self,
input: pb::notes::NoteIds,
) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| { self.with_col(|col| {
col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId)) col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId))
.map(Into::into) .map(Into::into)

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend; use super::Backend;
pub(super) use crate::pb::notetypes_service::Service as NotetypesService; pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService;
use crate::{ use crate::{
config::get_aux_notetype_config_key, config::get_aux_notetype_config_key,
notetype::{ notetype::{
@ -13,7 +13,10 @@ use crate::{
}; };
impl NotetypesService for Backend { impl NotetypesService for Backend {
fn add_notetype(&self, input: pb::Notetype) -> Result<pb::OpChangesWithId> { fn add_notetype(
&self,
input: pb::notetypes::Notetype,
) -> Result<pb::collection::OpChangesWithId> {
let mut notetype: Notetype = input.into(); let mut notetype: Notetype = input.into();
self.with_col(|col| { self.with_col(|col| {
Ok(col Ok(col
@ -23,13 +26,16 @@ impl NotetypesService for Backend {
}) })
} }
fn update_notetype(&self, input: pb::Notetype) -> Result<pb::OpChanges> { fn update_notetype(&self, input: pb::notetypes::Notetype) -> Result<pb::collection::OpChanges> {
let mut notetype: Notetype = input.into(); let mut notetype: Notetype = input.into();
self.with_col(|col| col.update_notetype(&mut notetype, false)) self.with_col(|col| col.update_notetype(&mut notetype, false))
.map(Into::into) .map(Into::into)
} }
fn add_notetype_legacy(&self, input: pb::Json) -> Result<pb::OpChangesWithId> { fn add_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChangesWithId> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into(); let mut notetype: Notetype = legacy.into();
self.with_col(|col| { self.with_col(|col| {
@ -40,7 +46,10 @@ impl NotetypesService for Backend {
}) })
} }
fn update_notetype_legacy(&self, input: pb::Json) -> Result<pb::OpChanges> { fn update_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChanges> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into(); let mut notetype: Notetype = legacy.into();
self.with_col(|col| col.update_notetype(&mut notetype, false)) self.with_col(|col| col.update_notetype(&mut notetype, false))
@ -49,8 +58,8 @@ impl NotetypesService for Backend {
fn add_or_update_notetype( fn add_or_update_notetype(
&self, &self,
input: pb::AddOrUpdateNotetypeRequest, input: pb::notetypes::AddOrUpdateNotetypeRequest,
) -> Result<pb::NotetypeId> { ) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| { self.with_col(|col| {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into(); let mut nt: Notetype = legacy.into();
@ -64,11 +73,14 @@ impl NotetypesService for Backend {
} else { } else {
col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?; col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?;
} }
Ok(pb::NotetypeId { ntid: nt.id.0 }) Ok(pb::notetypes::NotetypeId { ntid: nt.id.0 })
}) })
} }
fn get_stock_notetype_legacy(&self, input: pb::StockNotetype) -> Result<pb::Json> { fn get_stock_notetype_legacy(
&self,
input: pb::notetypes::StockNotetype,
) -> Result<pb::generic::Json> {
// fixme: use individual functions instead of full vec // fixme: use individual functions instead of full vec
let mut all = all_stock_notetypes(&self.tr); let mut all = all_stock_notetypes(&self.tr);
let idx = (input.kind as usize).min(all.len() - 1); let idx = (input.kind as usize).min(all.len() - 1);
@ -79,7 +91,7 @@ impl NotetypesService for Backend {
.map(Into::into) .map(Into::into)
} }
fn get_notetype(&self, input: pb::NotetypeId) -> Result<pb::Notetype> { fn get_notetype(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notetypes::Notetype> {
let ntid = input.into(); let ntid = input.into();
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
@ -89,7 +101,7 @@ impl NotetypesService for Backend {
}) })
} }
fn get_notetype_legacy(&self, input: pb::NotetypeId) -> Result<pb::Json> { fn get_notetype_legacy(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::Json> {
let ntid = input.into(); let ntid = input.into();
self.with_col(|col| { self.with_col(|col| {
let schema11: NotetypeSchema11 = let schema11: NotetypeSchema11 =
@ -98,56 +110,73 @@ impl NotetypesService for Backend {
}) })
} }
fn get_notetype_names(&self, _input: pb::Empty) -> Result<pb::NotetypeNames> { fn get_notetype_names(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeNames> {
self.with_col(|col| { self.with_col(|col| {
let entries: Vec<_> = col let entries: Vec<_> = col
.storage .storage
.get_all_notetype_names()? .get_all_notetype_names()?
.into_iter() .into_iter()
.map(|(id, name)| pb::NotetypeNameId { id: id.0, name }) .map(|(id, name)| pb::notetypes::NotetypeNameId { id: id.0, name })
.collect(); .collect();
Ok(pb::NotetypeNames { entries }) Ok(pb::notetypes::NotetypeNames { entries })
}) })
} }
fn get_notetype_names_and_counts(&self, _input: pb::Empty) -> Result<pb::NotetypeUseCounts> { fn get_notetype_names_and_counts(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeUseCounts> {
self.with_col(|col| { self.with_col(|col| {
let entries: Vec<_> = col let entries: Vec<_> = col
.storage .storage
.get_notetype_use_counts()? .get_notetype_use_counts()?
.into_iter() .into_iter()
.map(|(id, name, use_count)| pb::NotetypeNameIdUseCount { .map(
|(id, name, use_count)| pb::notetypes::NotetypeNameIdUseCount {
id: id.0, id: id.0,
name, name,
use_count, use_count,
}) },
)
.collect(); .collect();
Ok(pb::NotetypeUseCounts { entries }) Ok(pb::notetypes::NotetypeUseCounts { entries })
}) })
} }
fn get_notetype_id_by_name(&self, input: pb::String) -> Result<pb::NotetypeId> { fn get_notetype_id_by_name(
&self,
input: pb::generic::String,
) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| { self.with_col(|col| {
col.storage col.storage
.get_notetype_id(&input.val) .get_notetype_id(&input.val)
.and_then(|nt| nt.or_not_found(input.val)) .and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| pb::NotetypeId { ntid: ntid.0 }) .map(|ntid| pb::notetypes::NotetypeId { ntid: ntid.0 })
}) })
} }
fn remove_notetype(&self, input: pb::NotetypeId) -> Result<pb::OpChanges> { fn remove_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.remove_notetype(input.into())) self.with_col(|col| col.remove_notetype(input.into()))
.map(Into::into) .map(Into::into)
} }
fn get_aux_notetype_config_key(&self, input: pb::GetAuxConfigKeyRequest) -> Result<pb::String> { fn get_aux_notetype_config_key(
&self,
input: pb::notetypes::GetAuxConfigKeyRequest,
) -> Result<pb::generic::String> {
Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into()) Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into())
} }
fn get_aux_template_config_key( fn get_aux_template_config_key(
&self, &self,
input: pb::GetAuxTemplateConfigKeyRequest, input: pb::notetypes::GetAuxTemplateConfigKeyRequest,
) -> Result<pb::String> { ) -> Result<pb::generic::String> {
self.with_col(|col| { self.with_col(|col| {
col.get_aux_template_config_key( col.get_aux_template_config_key(
input.notetype_id.into(), input.notetype_id.into(),
@ -160,26 +189,29 @@ impl NotetypesService for Backend {
fn get_change_notetype_info( fn get_change_notetype_info(
&self, &self,
input: pb::GetChangeNotetypeInfoRequest, input: pb::notetypes::GetChangeNotetypeInfoRequest,
) -> Result<pb::ChangeNotetypeInfo> { ) -> Result<pb::notetypes::ChangeNotetypeInfo> {
self.with_col(|col| { self.with_col(|col| {
col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into()) col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into) .map(Into::into)
}) })
} }
fn change_notetype(&self, input: pb::ChangeNotetypeRequest) -> Result<pb::OpChanges> { fn change_notetype(
&self,
input: pb::notetypes::ChangeNotetypeRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into)) self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into))
} }
fn get_field_names(&self, input: pb::NotetypeId) -> Result<pb::StringList> { fn get_field_names(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::StringList> {
self.with_col(|col| col.storage.get_field_names(input.into())) self.with_col(|col| col.storage.get_field_names(input.into()))
.map(Into::into) .map(Into::into)
} }
} }
impl From<pb::Notetype> for Notetype { impl From<pb::notetypes::Notetype> for Notetype {
fn from(n: pb::Notetype) -> Self { fn from(n: pb::notetypes::Notetype) -> Self {
Notetype { Notetype {
id: n.id.into(), id: n.id.into(),
name: n.name, name: n.name,
@ -192,9 +224,9 @@ impl From<pb::Notetype> for Notetype {
} }
} }
impl From<NotetypeChangeInfo> for pb::ChangeNotetypeInfo { impl From<NotetypeChangeInfo> for pb::notetypes::ChangeNotetypeInfo {
fn from(i: NotetypeChangeInfo) -> Self { fn from(i: NotetypeChangeInfo) -> Self {
pb::ChangeNotetypeInfo { pb::notetypes::ChangeNotetypeInfo {
old_notetype_name: i.old_notetype_name, old_notetype_name: i.old_notetype_name,
old_field_names: i.old_field_names, old_field_names: i.old_field_names,
old_template_names: i.old_template_names, old_template_names: i.old_template_names,
@ -205,8 +237,8 @@ impl From<NotetypeChangeInfo> for pb::ChangeNotetypeInfo {
} }
} }
impl From<pb::ChangeNotetypeRequest> for ChangeNotetypeInput { impl From<pb::notetypes::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: pb::ChangeNotetypeRequest) -> Self { fn from(i: pb::notetypes::ChangeNotetypeRequest) -> Self {
ChangeNotetypeInput { ChangeNotetypeInput {
current_schema: i.current_schema.into(), current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_newtype(NoteId), note_ids: i.note_ids.into_newtype(NoteId),
@ -234,9 +266,9 @@ impl From<pb::ChangeNotetypeRequest> for ChangeNotetypeInput {
} }
} }
impl From<ChangeNotetypeInput> for pb::ChangeNotetypeRequest { impl From<ChangeNotetypeInput> for pb::notetypes::ChangeNotetypeRequest {
fn from(i: ChangeNotetypeInput) -> Self { fn from(i: ChangeNotetypeInput) -> Self {
pb::ChangeNotetypeRequest { pb::notetypes::ChangeNotetypeRequest {
current_schema: i.current_schema.into(), current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_iter().map(Into::into).collect(), note_ids: i.note_ids.into_iter().map(Into::into).collect(),
old_notetype_name: i.old_notetype_name, old_notetype_name: i.old_notetype_name,

View file

@ -8,9 +8,9 @@ use crate::{
undo::{UndoOutput, UndoStatus}, undo::{UndoOutput, UndoStatus},
}; };
impl From<OpChanges> for pb::OpChanges { impl From<OpChanges> for pb::collection::OpChanges {
fn from(c: OpChanges) -> Self { fn from(c: OpChanges) -> Self {
pb::OpChanges { pb::collection::OpChanges {
card: c.changes.card, card: c.changes.card,
note: c.changes.note, note: c.changes.note,
deck: c.changes.deck, deck: c.changes.deck,
@ -28,8 +28,8 @@ impl From<OpChanges> for pb::OpChanges {
} }
impl UndoStatus { impl UndoStatus {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::UndoStatus { pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::UndoStatus {
pb::UndoStatus { pb::collection::UndoStatus {
undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(), undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(),
redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(), redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(),
last_step: self.last_step as u32, last_step: self.last_step as u32,
@ -37,24 +37,24 @@ impl UndoStatus {
} }
} }
impl From<OpOutput<()>> for pb::OpChanges { impl From<OpOutput<()>> for pb::collection::OpChanges {
fn from(o: OpOutput<()>) -> Self { fn from(o: OpOutput<()>) -> Self {
o.changes.into() o.changes.into()
} }
} }
impl From<OpOutput<usize>> for pb::OpChangesWithCount { impl From<OpOutput<usize>> for pb::collection::OpChangesWithCount {
fn from(out: OpOutput<usize>) -> Self { fn from(out: OpOutput<usize>) -> Self {
pb::OpChangesWithCount { pb::collection::OpChangesWithCount {
count: out.output as u32, count: out.output as u32,
changes: Some(out.changes.into()), changes: Some(out.changes.into()),
} }
} }
} }
impl From<OpOutput<i64>> for pb::OpChangesWithId { impl From<OpOutput<i64>> for pb::collection::OpChangesWithId {
fn from(out: OpOutput<i64>) -> Self { fn from(out: OpOutput<i64>) -> Self {
pb::OpChangesWithId { pb::collection::OpChangesWithId {
id: out.output, id: out.output,
changes: Some(out.changes.into()), changes: Some(out.changes.into()),
} }
@ -62,8 +62,8 @@ impl From<OpOutput<i64>> for pb::OpChangesWithId {
} }
impl OpOutput<UndoOutput> { impl OpOutput<UndoOutput> {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::OpChangesAfterUndo { pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::OpChangesAfterUndo {
pb::OpChangesAfterUndo { pb::collection::OpChangesAfterUndo {
changes: Some(self.changes.into()), changes: Some(self.changes.into()),
operation: self.output.undone_op.describe(tr), operation: self.output.undone_op.describe(tr),
reverted_to_timestamp: self.output.reverted_to.0, reverted_to_timestamp: self.output.reverted_to.0,
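
These impls wrap an operation's result and its accumulated change flags into the generated response messages. The sketch below shows the same wrapping with simplified stand-ins for OpChanges, OpOutput, and the pb::collection messages.

#[derive(Debug)]
struct OpChanges {
    card: bool,
    note: bool,
}

struct OpOutput<T> {
    output: T,
    changes: OpChanges,
}

mod pb {
    pub mod collection {
        #[derive(Debug)]
        pub struct OpChanges {
            pub card: bool,
            pub note: bool,
        }

        #[derive(Debug)]
        pub struct OpChangesWithCount {
            pub count: u32,
            pub changes: Option<OpChanges>,
        }
    }
}

impl From<OpChanges> for pb::collection::OpChanges {
    fn from(c: OpChanges) -> Self {
        pb::collection::OpChanges {
            card: c.card,
            note: c.note,
        }
    }
}

impl From<OpOutput<usize>> for pb::collection::OpChangesWithCount {
    fn from(out: OpOutput<usize>) -> Self {
        pb::collection::OpChangesWithCount {
            count: out.output as u32,
            changes: Some(out.changes.into()),
        }
    }
}

fn main() {
    let out = OpOutput {
        output: 3usize,
        changes: OpChanges {
            card: true,
            note: false,
        },
    };
    let proto: pb::collection::OpChangesWithCount = out.into();
    assert_eq!(proto.count, 3);
    assert!(proto.changes.unwrap().card);
}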

View file

@ -55,17 +55,21 @@ pub(super) enum Progress {
Export(ExportProgress), Export(ExportProgress),
} }
pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Progress { pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::collection::Progress {
let progress = if let Some(progress) = progress { let progress = if let Some(progress) = progress {
match progress { match progress {
Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, tr)), Progress::MediaSync(p) => {
Progress::MediaCheck(n) => { pb::collection::progress::Value::MediaSync(media_sync_progress(p, tr))
pb::progress::Value::MediaCheck(tr.media_check_checked(n).into())
} }
Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync { Progress::MediaCheck(n) => {
pb::collection::progress::Value::MediaCheck(tr.media_check_checked(n).into())
}
Progress::FullSync(p) => {
pb::collection::progress::Value::FullSync(pb::collection::progress::FullSync {
transferred: p.transferred_bytes as u32, transferred: p.transferred_bytes as u32,
total: p.total_bytes as u32, total: p.total_bytes as u32,
}), })
}
Progress::NormalSync(p) => { Progress::NormalSync(p) => {
let stage = match p.stage { let stage = match p.stage {
SyncStage::Connecting => tr.sync_syncing(), SyncStage::Connecting => tr.sync_syncing(),
@ -79,7 +83,7 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
let removed = tr let removed = tr
.sync_media_removed_count(p.local_remove, p.remote_remove) .sync_media_removed_count(p.local_remove, p.remote_remove)
.into(); .into();
pb::progress::Value::NormalSync(pb::progress::NormalSync { pb::collection::progress::Value::NormalSync(pb::collection::progress::NormalSync {
stage, stage,
added, added,
removed, removed,
@ -100,13 +104,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
DatabaseCheckProgress::History => tr.database_check_checking_history(), DatabaseCheckProgress::History => tr.database_check_checking_history(),
} }
.to_string(); .to_string();
pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck { pb::collection::progress::Value::DatabaseCheck(
pb::collection::progress::DatabaseCheck {
stage, stage,
stage_total, stage_total,
stage_current, stage_current,
}) },
)
} }
Progress::Import(progress) => pb::progress::Value::Importing( Progress::Import(progress) => pb::collection::progress::Value::Importing(
match progress { match progress {
ImportProgress::File => tr.importing_importing_file(), ImportProgress::File => tr.importing_importing_file(),
ImportProgress::Media(n) => tr.importing_processed_media_file(n), ImportProgress::Media(n) => tr.importing_processed_media_file(n),
@ -117,7 +123,7 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
} }
.into(), .into(),
), ),
Progress::Export(progress) => pb::progress::Value::Exporting( Progress::Export(progress) => pb::collection::progress::Value::Exporting(
match progress { match progress {
ExportProgress::File => tr.exporting_exporting_file(), ExportProgress::File => tr.exporting_exporting_file(),
ExportProgress::Media(n) => tr.exporting_processed_media_files(n), ExportProgress::Media(n) => tr.exporting_processed_media_files(n),
@ -129,15 +135,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
), ),
} }
} else { } else {
pb::progress::Value::None(pb::Empty {}) pb::collection::progress::Value::None(pb::generic::Empty {})
}; };
pb::Progress { pb::collection::Progress {
value: Some(progress), value: Some(progress),
} }
} }
fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::progress::MediaSync { fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::collection::progress::MediaSync {
pb::progress::MediaSync { pb::collection::progress::MediaSync {
checked: tr.sync_media_checked_count(p.checked).into(), checked: tr.sync_media_checked_count(p.checked).into(),
added: tr added: tr
.sync_media_added_count(p.uploaded_files, p.downloaded_files) .sync_media_added_count(p.uploaded_files, p.downloaded_files)

View file

@ -12,8 +12,8 @@ use crate::{
}, },
}; };
impl From<pb::CardAnswer> for CardAnswer { impl From<pb::scheduler::CardAnswer> for CardAnswer {
fn from(mut answer: pb::CardAnswer) -> Self { fn from(mut answer: pb::scheduler::CardAnswer) -> Self {
let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default(); let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default();
let custom_data = mem::take(&mut new_state.custom_data); let custom_data = mem::take(&mut new_state.custom_data);
CardAnswer { CardAnswer {
@ -28,34 +28,38 @@ impl From<pb::CardAnswer> for CardAnswer {
} }
} }
impl From<pb::card_answer::Rating> for Rating { impl From<pb::scheduler::card_answer::Rating> for Rating {
fn from(rating: pb::card_answer::Rating) -> Self { fn from(rating: pb::scheduler::card_answer::Rating) -> Self {
match rating { match rating {
pb::card_answer::Rating::Again => Rating::Again, pb::scheduler::card_answer::Rating::Again => Rating::Again,
pb::card_answer::Rating::Hard => Rating::Hard, pb::scheduler::card_answer::Rating::Hard => Rating::Hard,
pb::card_answer::Rating::Good => Rating::Good, pb::scheduler::card_answer::Rating::Good => Rating::Good,
pb::card_answer::Rating::Easy => Rating::Easy, pb::scheduler::card_answer::Rating::Easy => Rating::Easy,
} }
} }
} }
impl From<QueuedCard> for pb::queued_cards::QueuedCard { impl From<QueuedCard> for pb::scheduler::queued_cards::QueuedCard {
fn from(queued_card: QueuedCard) -> Self { fn from(queued_card: QueuedCard) -> Self {
Self { Self {
card: Some(queued_card.card.into()), card: Some(queued_card.card.into()),
states: Some(queued_card.states.into()), states: Some(queued_card.states.into()),
queue: match queued_card.kind { queue: match queued_card.kind {
crate::scheduler::queue::QueueEntryKind::New => pb::queued_cards::Queue::New, crate::scheduler::queue::QueueEntryKind::New => {
crate::scheduler::queue::QueueEntryKind::Review => pb::queued_cards::Queue::Review, pb::scheduler::queued_cards::Queue::New
}
crate::scheduler::queue::QueueEntryKind::Review => {
pb::scheduler::queued_cards::Queue::Review
}
crate::scheduler::queue::QueueEntryKind::Learning => { crate::scheduler::queue::QueueEntryKind::Learning => {
pb::queued_cards::Queue::Learning pb::scheduler::queued_cards::Queue::Learning
} }
} as i32, } as i32,
} }
} }
} }
impl From<QueuedCards> for pb::QueuedCards { impl From<QueuedCards> for pb::scheduler::QueuedCards {
fn from(queued_cards: QueuedCards) -> Self { fn from(queued_cards: QueuedCards) -> Self {
Self { Self {
cards: queued_cards.cards.into_iter().map(Into::into).collect(), cards: queued_cards.cards.into_iter().map(Into::into).collect(),
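
Because prost stores proto enum fields as plain i32 values, the queue field above is filled with a match ... as i32. The sketch below isolates that pattern; the enum and structs are illustrative stand-ins rather than the generated pb::scheduler types.

mod pb {
    pub mod scheduler {
        pub mod queued_cards {
            #[derive(Debug, Clone, Copy, PartialEq, Eq)]
            #[repr(i32)]
            pub enum Queue {
                New = 0,
                Learning = 1,
                Review = 2,
            }
        }

        #[derive(Debug)]
        pub struct QueuedCard {
            // prost keeps proto enum fields as their i32 discriminant.
            pub queue: i32,
        }
    }
}

enum QueueEntryKind {
    New,
    Learning,
    Review,
}

struct QueuedCard {
    kind: QueueEntryKind,
}

impl From<QueuedCard> for pb::scheduler::QueuedCard {
    fn from(queued_card: QueuedCard) -> Self {
        Self {
            queue: match queued_card.kind {
                QueueEntryKind::New => pb::scheduler::queued_cards::Queue::New,
                QueueEntryKind::Learning => pb::scheduler::queued_cards::Queue::Learning,
                QueueEntryKind::Review => pb::scheduler::queued_cards::Queue::Review,
            } as i32,
        }
    }
}

fn main() {
    let proto: pb::scheduler::QueuedCard = QueuedCard {
        kind: QueueEntryKind::Review,
    }
    .into();
    assert_eq!(proto.queue, pb::scheduler::queued_cards::Queue::Review as i32);
}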

View file

@ -5,7 +5,7 @@ mod answering;
mod states; mod states;
use super::Backend; use super::Backend;
pub(super) use crate::pb::scheduler_service::Service as SchedulerService; pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService;
use crate::{ use crate::{
pb, pb,
prelude::*, prelude::*,
@ -19,7 +19,10 @@ use crate::{
impl SchedulerService for Backend { impl SchedulerService for Backend {
/// This behaves like _updateCutoff() in older code - it also unburies at the start of /// This behaves like _updateCutoff() in older code - it also unburies at the start of
/// a new day. /// a new day.
fn sched_timing_today(&self, _input: pb::Empty) -> Result<pb::SchedTimingTodayResponse> { fn sched_timing_today(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::SchedTimingTodayResponse> {
self.with_col(|col| { self.with_col(|col| {
let timing = col.timing_today()?; let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?; col.unbury_if_day_rolled_over(timing)?;
@ -28,16 +31,19 @@ impl SchedulerService for Backend {
} }
/// Fetch data from DB and return rendered string. /// Fetch data from DB and return rendered string.
fn studied_today(&self, _input: pb::Empty) -> Result<pb::String> { fn studied_today(&self, _input: pb::generic::Empty) -> Result<pb::generic::String> {
self.with_col(|col| col.studied_today().map(Into::into)) self.with_col(|col| col.studied_today().map(Into::into))
} }
/// Message rendering only, for old graphs. /// Message rendering only, for old graphs.
fn studied_today_message(&self, input: pb::StudiedTodayMessageRequest) -> Result<pb::String> { fn studied_today_message(
&self,
input: pb::scheduler::StudiedTodayMessageRequest,
) -> Result<pb::generic::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into()) Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
} }
fn update_stats(&self, input: pb::UpdateStatsRequest) -> Result<pb::Empty> { fn update_stats(&self, input: pb::scheduler::UpdateStatsRequest) -> Result<pb::generic::Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact_no_undo(|col| { col.transact_no_undo(|col| {
let today = col.current_due_day(0)?; let today = col.current_due_day(0)?;
@ -47,7 +53,10 @@ impl SchedulerService for Backend {
}) })
} }
fn extend_limits(&self, input: pb::ExtendLimitsRequest) -> Result<pb::Empty> { fn extend_limits(
&self,
input: pb::scheduler::ExtendLimitsRequest,
) -> Result<pb::generic::Empty> {
self.with_col(|col| { self.with_col(|col| {
col.transact_no_undo(|col| { col.transact_no_undo(|col| {
let today = col.current_due_day(0)?; let today = col.current_due_day(0)?;
@ -64,20 +73,32 @@ impl SchedulerService for Backend {
}) })
} }
fn counts_for_deck_today(&self, input: pb::DeckId) -> Result<pb::CountsForDeckTodayResponse> { fn counts_for_deck_today(
&self,
input: pb::decks::DeckId,
) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
self.with_col(|col| col.counts_for_deck_today(input.did.into())) self.with_col(|col| col.counts_for_deck_today(input.did.into()))
} }
fn congrats_info(&self, _input: pb::Empty) -> Result<pb::CongratsInfoResponse> { fn congrats_info(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::CongratsInfoResponse> {
self.with_col(|col| col.congrats_info()) self.with_col(|col| col.congrats_info())
} }
fn restore_buried_and_suspended_cards(&self, input: pb::CardIds) -> Result<pb::OpChanges> { fn restore_buried_and_suspended_cards(
&self,
input: pb::cards::CardIds,
) -> Result<pb::collection::OpChanges> {
let cids: Vec<_> = input.into(); let cids: Vec<_> = input.into();
self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into)) self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into))
} }
fn unbury_deck(&self, input: pb::UnburyDeckRequest) -> Result<pb::OpChanges> { fn unbury_deck(
&self,
input: pb::scheduler::UnburyDeckRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| { self.with_col(|col| {
col.unbury_deck(input.deck_id.into(), input.mode()) col.unbury_deck(input.deck_id.into(), input.mode())
.map(Into::into) .map(Into::into)
@ -86,8 +107,8 @@ impl SchedulerService for Backend {
fn bury_or_suspend_cards( fn bury_or_suspend_cards(
&self, &self,
input: pb::BuryOrSuspendCardsRequest, input: pb::scheduler::BuryOrSuspendCardsRequest,
) -> Result<pb::OpChangesWithCount> { ) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| { self.with_col(|col| {
let mode = input.mode(); let mode = input.mode();
let cids = if input.card_ids.is_empty() { let cids = if input.card_ids.is_empty() {
@ -100,15 +121,21 @@ impl SchedulerService for Backend {
}) })
} }
fn empty_filtered_deck(&self, input: pb::DeckId) -> Result<pb::OpChanges> { fn empty_filtered_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into)) self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into))
} }
fn rebuild_filtered_deck(&self, input: pb::DeckId) -> Result<pb::OpChangesWithCount> { fn rebuild_filtered_deck(
&self,
input: pb::decks::DeckId,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into)) self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
} }
fn schedule_cards_as_new(&self, input: pb::ScheduleCardsAsNewRequest) -> Result<pb::OpChanges> { fn schedule_cards_as_new(
&self,
input: pb::scheduler::ScheduleCardsAsNewRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| { self.with_col(|col| {
let cids = input.card_ids.into_newtype(CardId); let cids = input.card_ids.into_newtype(CardId);
col.reschedule_cards_as_new( col.reschedule_cards_as_new(
@ -118,7 +145,7 @@ impl SchedulerService for Backend {
input.reset_counts, input.reset_counts,
input input
.context .context
.and_then(pb::schedule_cards_as_new_request::Context::from_i32), .and_then(pb::scheduler::schedule_cards_as_new_request::Context::from_i32),
) )
.map(Into::into) .map(Into::into)
}) })
@ -126,19 +153,25 @@ impl SchedulerService for Backend {
fn schedule_cards_as_new_defaults( fn schedule_cards_as_new_defaults(
&self, &self,
input: pb::ScheduleCardsAsNewDefaultsRequest, input: pb::scheduler::ScheduleCardsAsNewDefaultsRequest,
) -> Result<pb::ScheduleCardsAsNewDefaultsResponse> { ) -> Result<pb::scheduler::ScheduleCardsAsNewDefaultsResponse> {
self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context()))) self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context())))
} }
fn set_due_date(&self, input: pb::SetDueDateRequest) -> Result<pb::OpChanges> { fn set_due_date(
&self,
input: pb::scheduler::SetDueDateRequest,
) -> Result<pb::collection::OpChanges> {
let config = input.config_key.map(|v| v.key().into()); let config = input.config_key.map(|v| v.key().into());
let days = input.days; let days = input.days;
let cids = input.card_ids.into_newtype(CardId); let cids = input.card_ids.into_newtype(CardId);
self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into)) self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into))
} }
fn sort_cards(&self, input: pb::SortCardsRequest) -> Result<pb::OpChangesWithCount> { fn sort_cards(
&self,
input: pb::scheduler::SortCardsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let cids = input.card_ids.into_newtype(CardId); let cids = input.card_ids.into_newtype(CardId);
let (start, step, random, shift) = ( let (start, step, random, shift) = (
input.starting_from, input.starting_from,
@ -157,66 +190,86 @@ impl SchedulerService for Backend {
}) })
} }
fn reposition_defaults(&self, _input: pb::Empty) -> Result<pb::RepositionDefaultsResponse> { fn reposition_defaults(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::RepositionDefaultsResponse> {
self.with_col(|col| Ok(col.reposition_defaults())) self.with_col(|col| Ok(col.reposition_defaults()))
} }
fn sort_deck(&self, input: pb::SortDeckRequest) -> Result<pb::OpChangesWithCount> { fn sort_deck(
&self,
input: pb::scheduler::SortDeckRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| { self.with_col(|col| {
col.sort_deck_legacy(input.deck_id.into(), input.randomize) col.sort_deck_legacy(input.deck_id.into(), input.randomize)
.map(Into::into) .map(Into::into)
}) })
} }
fn get_scheduling_states(&self, input: pb::CardId) -> Result<pb::SchedulingStates> { fn get_scheduling_states(
&self,
input: pb::cards::CardId,
) -> Result<pb::scheduler::SchedulingStates> {
let cid: CardId = input.into(); let cid: CardId = input.into();
self.with_col(|col| col.get_scheduling_states(cid)) self.with_col(|col| col.get_scheduling_states(cid))
.map(Into::into) .map(Into::into)
} }
fn describe_next_states(&self, input: pb::SchedulingStates) -> Result<pb::StringList> { fn describe_next_states(
&self,
input: pb::scheduler::SchedulingStates,
) -> Result<pb::generic::StringList> {
let states: SchedulingStates = input.into(); let states: SchedulingStates = input.into();
self.with_col(|col| col.describe_next_states(states)) self.with_col(|col| col.describe_next_states(states))
.map(Into::into) .map(Into::into)
} }
fn state_is_leech(&self, input: pb::SchedulingState) -> Result<pb::Bool> { fn state_is_leech(&self, input: pb::scheduler::SchedulingState) -> Result<pb::generic::Bool> {
let state: CardState = input.into(); let state: CardState = input.into();
Ok(state.leeched().into()) Ok(state.leeched().into())
} }
fn answer_card(&self, input: pb::CardAnswer) -> Result<pb::OpChanges> { fn answer_card(&self, input: pb::scheduler::CardAnswer) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.answer_card(&mut input.into())) self.with_col(|col| col.answer_card(&mut input.into()))
.map(Into::into) .map(Into::into)
} }
fn upgrade_scheduler(&self, _input: pb::Empty) -> Result<pb::Empty> { fn upgrade_scheduler(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler())) self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler()))
.map(Into::into) .map(Into::into)
} }
fn get_queued_cards(&self, input: pb::GetQueuedCardsRequest) -> Result<pb::QueuedCards> { fn get_queued_cards(
&self,
input: pb::scheduler::GetQueuedCardsRequest,
) -> Result<pb::scheduler::QueuedCards> {
self.with_col(|col| { self.with_col(|col| {
col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only) col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
.map(Into::into) .map(Into::into)
}) })
} }
fn custom_study(&self, input: pb::CustomStudyRequest) -> Result<pb::OpChanges> { fn custom_study(
&self,
input: pb::scheduler::CustomStudyRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.custom_study(input)).map(Into::into) self.with_col(|col| col.custom_study(input)).map(Into::into)
} }
fn custom_study_defaults( fn custom_study_defaults(
&self, &self,
input: pb::CustomStudyDefaultsRequest, input: pb::scheduler::CustomStudyDefaultsRequest,
) -> Result<pb::CustomStudyDefaultsResponse> { ) -> Result<pb::scheduler::CustomStudyDefaultsResponse> {
self.with_col(|col| col.custom_study_defaults(input.deck_id.into())) self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
} }
} }
impl From<crate::scheduler::timing::SchedTimingToday> for pb::SchedTimingTodayResponse { impl From<crate::scheduler::timing::SchedTimingToday> for pb::scheduler::SchedTimingTodayResponse {
fn from(t: crate::scheduler::timing::SchedTimingToday) -> pb::SchedTimingTodayResponse { fn from(
pb::SchedTimingTodayResponse { t: crate::scheduler::timing::SchedTimingToday,
) -> pb::scheduler::SchedTimingTodayResponse {
pb::scheduler::SchedTimingTodayResponse {
days_elapsed: t.days_elapsed, days_elapsed: t.days_elapsed,
next_day_at: t.next_day_at.0, next_day_at: t.next_day_at.0,
} }

View file

@@ -3,31 +3,30 @@
 use crate::{pb, scheduler::states::FilteredState};
-impl From<FilteredState> for pb::scheduling_state::Filtered {
+impl From<FilteredState> for pb::scheduler::scheduling_state::Filtered {
     fn from(state: FilteredState) -> Self {
-        pb::scheduling_state::Filtered {
+        pb::scheduler::scheduling_state::Filtered {
             value: Some(match state {
                 FilteredState::Preview(state) => {
-                    pb::scheduling_state::filtered::Value::Preview(state.into())
+                    pb::scheduler::scheduling_state::filtered::Value::Preview(state.into())
                 }
                 FilteredState::Rescheduling(state) => {
-                    pb::scheduling_state::filtered::Value::Rescheduling(state.into())
+                    pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state.into())
                 }
             }),
         }
     }
 }
-impl From<pb::scheduling_state::Filtered> for FilteredState {
-    fn from(state: pb::scheduling_state::Filtered) -> Self {
-        match state
-            .value
-            .unwrap_or_else(|| pb::scheduling_state::filtered::Value::Preview(Default::default()))
-        {
-            pb::scheduling_state::filtered::Value::Preview(state) => {
+impl From<pb::scheduler::scheduling_state::Filtered> for FilteredState {
+    fn from(state: pb::scheduler::scheduling_state::Filtered) -> Self {
+        match state.value.unwrap_or_else(|| {
+            pb::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
+        }) {
+            pb::scheduler::scheduling_state::filtered::Value::Preview(state) => {
                 FilteredState::Preview(state.into())
             }
-            pb::scheduling_state::filtered::Value::Rescheduling(state) => {
+            pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
                 FilteredState::Rescheduling(state.into())
             }
         }


@@ -3,8 +3,8 @@
 use crate::{pb, scheduler::states::LearnState};
-impl From<pb::scheduling_state::Learning> for LearnState {
-    fn from(state: pb::scheduling_state::Learning) -> Self {
+impl From<pb::scheduler::scheduling_state::Learning> for LearnState {
+    fn from(state: pb::scheduler::scheduling_state::Learning) -> Self {
         LearnState {
             remaining_steps: state.remaining_steps,
             scheduled_secs: state.scheduled_secs,
@@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Learning> for LearnState {
     }
 }
-impl From<LearnState> for pb::scheduling_state::Learning {
+impl From<LearnState> for pb::scheduler::scheduling_state::Learning {
     fn from(state: LearnState) -> Self {
-        pb::scheduling_state::Learning {
+        pb::scheduler::scheduling_state::Learning {
             remaining_steps: state.remaining_steps,
             scheduled_secs: state.scheduled_secs,
         }


@@ -15,9 +15,9 @@ use crate::{
     scheduler::states::{CardState, NewState, NormalState, SchedulingStates},
 };
-impl From<SchedulingStates> for pb::SchedulingStates {
+impl From<SchedulingStates> for pb::scheduler::SchedulingStates {
     fn from(choices: SchedulingStates) -> Self {
-        pb::SchedulingStates {
+        pb::scheduler::SchedulingStates {
             current: Some(choices.current.into()),
             again: Some(choices.again.into()),
             hard: Some(choices.hard.into()),
@@ -27,8 +27,8 @@ impl From<SchedulingStates> for pb::SchedulingStates {
     }
 }
-impl From<pb::SchedulingStates> for SchedulingStates {
-    fn from(choices: pb::SchedulingStates) -> Self {
+impl From<pb::scheduler::SchedulingStates> for SchedulingStates {
+    fn from(choices: pb::scheduler::SchedulingStates) -> Self {
         SchedulingStates {
             current: choices.current.unwrap_or_default().into(),
             again: choices.again.unwrap_or_default().into(),
@@ -39,24 +39,30 @@ impl From<pb::SchedulingStates> for SchedulingStates {
     }
 }
-impl From<CardState> for pb::SchedulingState {
+impl From<CardState> for pb::scheduler::SchedulingState {
     fn from(state: CardState) -> Self {
-        pb::SchedulingState {
+        pb::scheduler::SchedulingState {
             value: Some(match state {
-                CardState::Normal(state) => pb::scheduling_state::Value::Normal(state.into()),
-                CardState::Filtered(state) => pb::scheduling_state::Value::Filtered(state.into()),
+                CardState::Normal(state) => {
+                    pb::scheduler::scheduling_state::Value::Normal(state.into())
+                }
+                CardState::Filtered(state) => {
+                    pb::scheduler::scheduling_state::Value::Filtered(state.into())
+                }
             }),
             custom_data: None,
         }
     }
 }
-impl From<pb::SchedulingState> for CardState {
-    fn from(state: pb::SchedulingState) -> Self {
+impl From<pb::scheduler::SchedulingState> for CardState {
+    fn from(state: pb::scheduler::SchedulingState) -> Self {
         if let Some(value) = state.value {
             match value {
-                pb::scheduling_state::Value::Normal(normal) => CardState::Normal(normal.into()),
-                pb::scheduling_state::Value::Filtered(filtered) => {
+                pb::scheduler::scheduling_state::Value::Normal(normal) => {
+                    CardState::Normal(normal.into())
+                }
+                pb::scheduler::scheduling_state::Value::Filtered(filtered) => {
                     CardState::Filtered(filtered.into())
                 }
             }


@@ -3,17 +3,17 @@
 use crate::{pb, scheduler::states::NewState};
-impl From<pb::scheduling_state::New> for NewState {
-    fn from(state: pb::scheduling_state::New) -> Self {
+impl From<pb::scheduler::scheduling_state::New> for NewState {
+    fn from(state: pb::scheduler::scheduling_state::New) -> Self {
         NewState {
             position: state.position,
         }
     }
 }
-impl From<NewState> for pb::scheduling_state::New {
+impl From<NewState> for pb::scheduler::scheduling_state::New {
     fn from(state: NewState) -> Self {
-        pb::scheduling_state::New {
+        pb::scheduler::scheduling_state::New {
             position: state.position,
         }
     }


@@ -3,37 +3,42 @@
 use crate::{pb, scheduler::states::NormalState};
-impl From<NormalState> for pb::scheduling_state::Normal {
+impl From<NormalState> for pb::scheduler::scheduling_state::Normal {
     fn from(state: NormalState) -> Self {
-        pb::scheduling_state::Normal {
+        pb::scheduler::scheduling_state::Normal {
             value: Some(match state {
-                NormalState::New(state) => pb::scheduling_state::normal::Value::New(state.into()),
+                NormalState::New(state) => {
+                    pb::scheduler::scheduling_state::normal::Value::New(state.into())
+                }
                 NormalState::Learning(state) => {
-                    pb::scheduling_state::normal::Value::Learning(state.into())
+                    pb::scheduler::scheduling_state::normal::Value::Learning(state.into())
                 }
                 NormalState::Review(state) => {
-                    pb::scheduling_state::normal::Value::Review(state.into())
+                    pb::scheduler::scheduling_state::normal::Value::Review(state.into())
                 }
                 NormalState::Relearning(state) => {
-                    pb::scheduling_state::normal::Value::Relearning(state.into())
+                    pb::scheduler::scheduling_state::normal::Value::Relearning(state.into())
                 }
             }),
         }
     }
 }
-impl From<pb::scheduling_state::Normal> for NormalState {
-    fn from(state: pb::scheduling_state::Normal) -> Self {
-        match state
-            .value
-            .unwrap_or_else(|| pb::scheduling_state::normal::Value::New(Default::default()))
-        {
-            pb::scheduling_state::normal::Value::New(state) => NormalState::New(state.into()),
-            pb::scheduling_state::normal::Value::Learning(state) => {
+impl From<pb::scheduler::scheduling_state::Normal> for NormalState {
+    fn from(state: pb::scheduler::scheduling_state::Normal) -> Self {
+        match state.value.unwrap_or_else(|| {
+            pb::scheduler::scheduling_state::normal::Value::New(Default::default())
+        }) {
+            pb::scheduler::scheduling_state::normal::Value::New(state) => {
+                NormalState::New(state.into())
+            }
+            pb::scheduler::scheduling_state::normal::Value::Learning(state) => {
                 NormalState::Learning(state.into())
             }
-            pb::scheduling_state::normal::Value::Review(state) => NormalState::Review(state.into()),
-            pb::scheduling_state::normal::Value::Relearning(state) => {
+            pb::scheduler::scheduling_state::normal::Value::Review(state) => {
+                NormalState::Review(state.into())
+            }
+            pb::scheduler::scheduling_state::normal::Value::Relearning(state) => {
                 NormalState::Relearning(state.into())
             }
         }


@@ -3,8 +3,8 @@
 use crate::{pb, scheduler::states::PreviewState};
-impl From<pb::scheduling_state::Preview> for PreviewState {
-    fn from(state: pb::scheduling_state::Preview) -> Self {
+impl From<pb::scheduler::scheduling_state::Preview> for PreviewState {
+    fn from(state: pb::scheduler::scheduling_state::Preview) -> Self {
         PreviewState {
             scheduled_secs: state.scheduled_secs,
             finished: state.finished,
@@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Preview> for PreviewState {
     }
 }
-impl From<PreviewState> for pb::scheduling_state::Preview {
+impl From<PreviewState> for pb::scheduler::scheduling_state::Preview {
     fn from(state: PreviewState) -> Self {
-        pb::scheduling_state::Preview {
+        pb::scheduler::scheduling_state::Preview {
             scheduled_secs: state.scheduled_secs,
             finished: state.finished,
         }


@@ -3,8 +3,8 @@
 use crate::{pb, scheduler::states::RelearnState};
-impl From<pb::scheduling_state::Relearning> for RelearnState {
-    fn from(state: pb::scheduling_state::Relearning) -> Self {
+impl From<pb::scheduler::scheduling_state::Relearning> for RelearnState {
+    fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self {
         RelearnState {
             review: state.review.unwrap_or_default().into(),
             learning: state.learning.unwrap_or_default().into(),
@@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Relearning> for RelearnState {
     }
 }
-impl From<RelearnState> for pb::scheduling_state::Relearning {
+impl From<RelearnState> for pb::scheduler::scheduling_state::Relearning {
     fn from(state: RelearnState) -> Self {
-        pb::scheduling_state::Relearning {
+        pb::scheduler::scheduling_state::Relearning {
             review: Some(state.review.into()),
             learning: Some(state.learning.into()),
         }


@@ -3,17 +3,17 @@
 use crate::{pb, scheduler::states::ReschedulingFilterState};
-impl From<pb::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
-    fn from(state: pb::scheduling_state::ReschedulingFilter) -> Self {
+impl From<pb::scheduler::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
+    fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self {
         ReschedulingFilterState {
             original_state: state.original_state.unwrap_or_default().into(),
         }
     }
 }
-impl From<ReschedulingFilterState> for pb::scheduling_state::ReschedulingFilter {
+impl From<ReschedulingFilterState> for pb::scheduler::scheduling_state::ReschedulingFilter {
     fn from(state: ReschedulingFilterState) -> Self {
-        pb::scheduling_state::ReschedulingFilter {
+        pb::scheduler::scheduling_state::ReschedulingFilter {
             original_state: Some(state.original_state.into()),
         }
     }


@@ -3,8 +3,8 @@
 use crate::{pb, scheduler::states::ReviewState};
-impl From<pb::scheduling_state::Review> for ReviewState {
-    fn from(state: pb::scheduling_state::Review) -> Self {
+impl From<pb::scheduler::scheduling_state::Review> for ReviewState {
+    fn from(state: pb::scheduler::scheduling_state::Review) -> Self {
         ReviewState {
             scheduled_days: state.scheduled_days,
             elapsed_days: state.elapsed_days,
@@ -15,9 +15,9 @@ impl From<pb::scheduling_state::Review> for ReviewState {
     }
 }
-impl From<ReviewState> for pb::scheduling_state::Review {
+impl From<ReviewState> for pb::scheduler::scheduling_state::Review {
     fn from(state: ReviewState) -> Self {
-        pb::scheduling_state::Review {
+        pb::scheduler::scheduling_state::Review {
             scheduled_days: state.scheduled_days,
             elapsed_days: state.elapsed_days,
             ease_factor: state.ease_factor,


@@ -6,8 +6,8 @@ use std::str::FromStr;
 use crate::{browser_table, i18n::I18n, pb};
 impl browser_table::Column {
-    pub fn to_pb_column(self, i18n: &I18n) -> pb::browser_columns::Column {
-        pb::browser_columns::Column {
+    pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column {
+        pb::search::browser_columns::Column {
             key: self.to_string(),
             cards_mode_label: self.cards_mode_label(i18n),
             notes_mode_label: self.notes_mode_label(i18n),
@@ -20,8 +20,8 @@ impl browser_table::Column {
     }
 }
-impl From<pb::StringList> for Vec<browser_table::Column> {
-    fn from(input: pb::StringList) -> Self {
+impl From<pb::generic::StringList> for Vec<browser_table::Column> {
+    fn from(input: pb::generic::StringList) -> Self {
         input
             .vals
             .iter()


@@ -7,56 +7,62 @@ mod search_node;
 use std::{str::FromStr, sync::Arc};
 use super::{notes::to_note_ids, Backend};
-pub(super) use crate::pb::search_service::Service as SearchService;
+pub(super) use crate::pb::search::search_service::Service as SearchService;
 use crate::{
     browser_table::Column,
     pb,
-    pb::sort_order::Value as SortOrderProto,
+    pb::search::sort_order::Value as SortOrderProto,
     prelude::*,
     search::{replace_search_node, JoinSearches, Node, SortMode},
 };
 impl SearchService for Backend {
-    fn build_search_string(&self, input: pb::SearchNode) -> Result<pb::String> {
+    fn build_search_string(&self, input: pb::search::SearchNode) -> Result<pb::generic::String> {
         let node: Node = input.try_into()?;
         Ok(SearchBuilder::from_root(node).write().into())
     }
-    fn search_cards(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
+    fn search_cards(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
         self.with_col(|col| {
             let order = input.order.unwrap_or_default().value.into();
             let cids = col.search_cards(&input.search, order)?;
-            Ok(pb::SearchResponse {
+            Ok(pb::search::SearchResponse {
                 ids: cids.into_iter().map(|v| v.0).collect(),
             })
         })
     }
-    fn search_notes(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
+    fn search_notes(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
         self.with_col(|col| {
             let order = input.order.unwrap_or_default().value.into();
             let nids = col.search_notes(&input.search, order)?;
-            Ok(pb::SearchResponse {
+            Ok(pb::search::SearchResponse {
                 ids: nids.into_iter().map(|v| v.0).collect(),
             })
         })
     }
-    fn join_search_nodes(&self, input: pb::JoinSearchNodesRequest) -> Result<pb::String> {
+    fn join_search_nodes(
+        &self,
+        input: pb::search::JoinSearchNodesRequest,
+    ) -> Result<pb::generic::String> {
         let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?;
         let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?;
         Ok(
-            match pb::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
-                pb::search_node::group::Joiner::And => existing_node.and_flat(additional_node),
-                pb::search_node::group::Joiner::Or => existing_node.or_flat(additional_node),
+            match pb::search::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
+                pb::search::search_node::group::Joiner::And => existing_node.and_flat(additional_node),
+                pb::search::search_node::group::Joiner::Or => existing_node.or_flat(additional_node),
             }
             .write()
             .into(),
         )
     }
-    fn replace_search_node(&self, input: pb::ReplaceSearchNodeRequest) -> Result<pb::String> {
+    fn replace_search_node(
+        &self,
+        input: pb::search::ReplaceSearchNodeRequest,
+    ) -> Result<pb::generic::String> {
         let existing = {
             let node = input.existing_node.unwrap_or_default().try_into()?;
             if let Node::Group(nodes) = node {
@@ -69,7 +75,10 @@ impl SearchService for Backend {
         Ok(replace_search_node(existing, replacement).into())
     }
-    fn find_and_replace(&self, input: pb::FindAndReplaceRequest) -> Result<pb::OpChangesWithCount> {
+    fn find_and_replace(
+        &self,
+        input: pb::search::FindAndReplaceRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         let mut search = if input.regex {
             input.search
         } else {
@@ -94,11 +103,17 @@ impl SearchService for Backend {
         })
     }
-    fn all_browser_columns(&self, _input: pb::Empty) -> Result<pb::BrowserColumns> {
+    fn all_browser_columns(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::search::BrowserColumns> {
         self.with_col(|col| Ok(col.all_browser_columns()))
     }
-    fn set_active_browser_columns(&self, input: pb::StringList) -> Result<pb::Empty> {
+    fn set_active_browser_columns(
+        &self,
+        input: pb::generic::StringList,
+    ) -> Result<pb::generic::Empty> {
         self.with_col(|col| {
             col.state.active_browser_columns = Some(Arc::new(input.into()));
             Ok(())
@@ -106,15 +121,15 @@ impl SearchService for Backend {
             .map(Into::into)
     }
-    fn browser_row_for_id(&self, input: pb::Int64) -> Result<pb::BrowserRow> {
+    fn browser_row_for_id(&self, input: pb::generic::Int64) -> Result<pb::search::BrowserRow> {
         self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into))
     }
 }
 impl From<Option<SortOrderProto>> for SortMode {
     fn from(order: Option<SortOrderProto>) -> Self {
-        use pb::sort_order::Value as V;
-        match order.unwrap_or(V::None(pb::Empty {})) {
+        use pb::search::sort_order::Value as V;
+        match order.unwrap_or(V::None(pb::generic::Empty {})) {
             V::None(_) => SortMode::NoOrder,
             V::Custom(s) => SortMode::Custom(s),
             V::Builtin(b) => SortMode::Builtin {


@@ -12,11 +12,11 @@ use crate::{
     text::{escape_anki_wildcards, escape_anki_wildcards_for_search_node},
 };
-impl TryFrom<pb::SearchNode> for Node {
+impl TryFrom<pb::search::SearchNode> for Node {
     type Error = AnkiError;
-    fn try_from(msg: pb::SearchNode) -> std::result::Result<Self, Self::Error> {
-        use pb::search_node::{group::Joiner, Filter, Flag};
+    fn try_from(msg: pb::search::SearchNode) -> std::result::Result<Self, Self::Error> {
+        use pb::search::search_node::{group::Joiner, Filter, Flag};
         Ok(if let Some(filter) = msg.filter {
             match filter {
                 Filter::Tag(s) => SearchNode::from_tag_name(&s).into(),
@@ -52,7 +52,7 @@ impl TryFrom<pb::SearchNode> for Node {
                 }),
                 Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
                 Filter::CardState(state) => Node::Search(SearchNode::State(
-                    pb::search_node::CardState::from_i32(state)
+                    pb::search::search_node::CardState::from_i32(state)
                         .unwrap_or_default()
                         .into(),
                 )),
@@ -114,33 +114,33 @@ impl TryFrom<pb::SearchNode> for Node {
     }
 }
-impl From<pb::search_node::Rating> for RatingKind {
-    fn from(r: pb::search_node::Rating) -> Self {
+impl From<pb::search::search_node::Rating> for RatingKind {
+    fn from(r: pb::search::search_node::Rating) -> Self {
         match r {
-            pb::search_node::Rating::Again => RatingKind::AnswerButton(1),
-            pb::search_node::Rating::Hard => RatingKind::AnswerButton(2),
-            pb::search_node::Rating::Good => RatingKind::AnswerButton(3),
-            pb::search_node::Rating::Easy => RatingKind::AnswerButton(4),
-            pb::search_node::Rating::Any => RatingKind::AnyAnswerButton,
-            pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
+            pb::search::search_node::Rating::Again => RatingKind::AnswerButton(1),
+            pb::search::search_node::Rating::Hard => RatingKind::AnswerButton(2),
+            pb::search::search_node::Rating::Good => RatingKind::AnswerButton(3),
+            pb::search::search_node::Rating::Easy => RatingKind::AnswerButton(4),
+            pb::search::search_node::Rating::Any => RatingKind::AnyAnswerButton,
+            pb::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
         }
     }
 }
-impl From<pb::search_node::CardState> for StateKind {
-    fn from(k: pb::search_node::CardState) -> Self {
+impl From<pb::search::search_node::CardState> for StateKind {
+    fn from(k: pb::search::search_node::CardState) -> Self {
         match k {
-            pb::search_node::CardState::New => StateKind::New,
-            pb::search_node::CardState::Learn => StateKind::Learning,
-            pb::search_node::CardState::Review => StateKind::Review,
-            pb::search_node::CardState::Due => StateKind::Due,
-            pb::search_node::CardState::Suspended => StateKind::Suspended,
-            pb::search_node::CardState::Buried => StateKind::Buried,
+            pb::search::search_node::CardState::New => StateKind::New,
+            pb::search::search_node::CardState::Learn => StateKind::Learning,
+            pb::search::search_node::CardState::Review => StateKind::Review,
+            pb::search::search_node::CardState::Due => StateKind::Due,
+            pb::search::search_node::CardState::Suspended => StateKind::Suspended,
+            pb::search::search_node::CardState::Buried => StateKind::Buried,
         }
     }
 }
-impl pb::search_node::IdList {
+impl pb::search::search_node::IdList {
     fn into_id_string(self) -> String {
         self.ids
             .iter()


@@ -2,23 +2,29 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 use super::Backend;
-pub(super) use crate::pb::stats_service::Service as StatsService;
+pub(super) use crate::pb::stats::stats_service::Service as StatsService;
 use crate::{pb, prelude::*, revlog::RevlogReviewKind};
 impl StatsService for Backend {
-    fn card_stats(&self, input: pb::CardId) -> Result<pb::CardStatsResponse> {
+    fn card_stats(&self, input: pb::cards::CardId) -> Result<pb::stats::CardStatsResponse> {
         self.with_col(|col| col.card_stats(input.into()))
     }
-    fn graphs(&self, input: pb::GraphsRequest) -> Result<pb::GraphsResponse> {
+    fn graphs(&self, input: pb::stats::GraphsRequest) -> Result<pb::stats::GraphsResponse> {
         self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
     }
-    fn get_graph_preferences(&self, _input: pb::Empty) -> Result<pb::GraphPreferences> {
+    fn get_graph_preferences(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::stats::GraphPreferences> {
         self.with_col(|col| Ok(col.get_graph_preferences()))
     }
-    fn set_graph_preferences(&self, input: pb::GraphPreferences) -> Result<pb::Empty> {
+    fn set_graph_preferences(
+        &self,
+        input: pb::stats::GraphPreferences,
+    ) -> Result<pb::generic::Empty> {
         self.with_col(|col| col.set_graph_preferences(input))
             .map(Into::into)
     }
@@ -27,11 +33,11 @@ impl StatsService for Backend {
 impl From<RevlogReviewKind> for i32 {
     fn from(kind: RevlogReviewKind) -> Self {
         (match kind {
-            RevlogReviewKind::Learning => pb::revlog_entry::ReviewKind::Learning,
-            RevlogReviewKind::Review => pb::revlog_entry::ReviewKind::Review,
-            RevlogReviewKind::Relearning => pb::revlog_entry::ReviewKind::Relearning,
-            RevlogReviewKind::Filtered => pb::revlog_entry::ReviewKind::Filtered,
-            RevlogReviewKind::Manual => pb::revlog_entry::ReviewKind::Manual,
+            RevlogReviewKind::Learning => pb::stats::revlog_entry::ReviewKind::Learning,
+            RevlogReviewKind::Review => pb::stats::revlog_entry::ReviewKind::Review,
+            RevlogReviewKind::Relearning => pb::stats::revlog_entry::ReviewKind::Relearning,
+            RevlogReviewKind::Filtered => pb::stats::revlog_entry::ReviewKind::Filtered,
+            RevlogReviewKind::Manual => pb::stats::revlog_entry::ReviewKind::Manual,
         }) as i32
     }
 }


@@ -9,7 +9,7 @@ use futures::future::{AbortHandle, AbortRegistration, Abortable};
 use slog::warn;
 use super::{progress::AbortHandleSlot, Backend};
-pub(super) use crate::pb::sync_service::Service as SyncService;
+pub(super) use crate::pb::sync::sync_service::Service as SyncService;
 use crate::{
     media::MediaManager,
     pb,
@@ -30,47 +30,47 @@ pub(super) struct SyncState {
 #[derive(Default, Debug)]
 pub(super) struct RemoteSyncStatus {
     pub last_check: TimestampSecs,
-    pub last_response: pb::sync_status_response::Required,
+    pub last_response: pb::sync::sync_status_response::Required,
 }
 impl RemoteSyncStatus {
-    pub(super) fn update(&mut self, required: pb::sync_status_response::Required) {
+    pub(super) fn update(&mut self, required: pb::sync::sync_status_response::Required) {
         self.last_check = TimestampSecs::now();
         self.last_response = required
     }
 }
-impl From<SyncOutput> for pb::SyncCollectionResponse {
+impl From<SyncOutput> for pb::sync::SyncCollectionResponse {
     fn from(o: SyncOutput) -> Self {
-        pb::SyncCollectionResponse {
+        pb::sync::SyncCollectionResponse {
             host_number: o.host_number,
             server_message: o.server_message,
             required: match o.required {
                 SyncActionRequired::NoChanges => {
-                    pb::sync_collection_response::ChangesRequired::NoChanges as i32
+                    pb::sync::sync_collection_response::ChangesRequired::NoChanges as i32
                 }
                 SyncActionRequired::FullSyncRequired {
                     upload_ok,
                     download_ok,
                 } => {
                     if !upload_ok {
-                        pb::sync_collection_response::ChangesRequired::FullDownload as i32
+                        pb::sync::sync_collection_response::ChangesRequired::FullDownload as i32
                     } else if !download_ok {
-                        pb::sync_collection_response::ChangesRequired::FullUpload as i32
+                        pb::sync::sync_collection_response::ChangesRequired::FullUpload as i32
                     } else {
-                        pb::sync_collection_response::ChangesRequired::FullSync as i32
+                        pb::sync::sync_collection_response::ChangesRequired::FullSync as i32
                     }
                 }
                 SyncActionRequired::NormalSyncRequired => {
-                    pb::sync_collection_response::ChangesRequired::NormalSync as i32
+                    pb::sync::sync_collection_response::ChangesRequired::NormalSync as i32
                 }
             },
         }
     }
 }
-impl From<pb::SyncAuth> for SyncAuth {
-    fn from(a: pb::SyncAuth) -> Self {
+impl From<pb::sync::SyncAuth> for SyncAuth {
+    fn from(a: pb::sync::SyncAuth) -> Self {
         SyncAuth {
             hkey: a.hkey,
             host_number: a.host_number,
@@ -79,11 +79,11 @@ impl From<pb::SyncAuth> for SyncAuth {
 }
 impl SyncService for Backend {
-    fn sync_media(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
+    fn sync_media(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
         self.sync_media_inner(input).map(Into::into)
     }
-    fn abort_sync(&self, _input: pb::Empty) -> Result<pb::Empty> {
+    fn abort_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
         if let Some(handle) = self.sync_abort.lock().unwrap().take() {
             handle.abort();
         }
@@ -91,7 +91,7 @@ impl SyncService for Backend {
     }
     /// Abort the media sync. Does not wait for completion.
-    fn abort_media_sync(&self, _input: pb::Empty) -> Result<pb::Empty> {
+    fn abort_media_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
         let guard = self.state.lock().unwrap();
         if let Some(handle) = &guard.sync.media_sync_abort {
             handle.abort();
@@ -99,33 +99,39 @@ impl SyncService for Backend {
         Ok(().into())
     }
-    fn before_upload(&self, _input: pb::Empty) -> Result<pb::Empty> {
+    fn before_upload(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
         self.with_col(|col| col.before_upload().map(Into::into))
     }
-    fn sync_login(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
+    fn sync_login(&self, input: pb::sync::SyncLoginRequest) -> Result<pb::sync::SyncAuth> {
         self.sync_login_inner(input)
     }
-    fn sync_status(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
+    fn sync_status(&self, input: pb::sync::SyncAuth) -> Result<pb::sync::SyncStatusResponse> {
         self.sync_status_inner(input)
     }
-    fn sync_collection(&self, input: pb::SyncAuth) -> Result<pb::SyncCollectionResponse> {
+    fn sync_collection(
+        &self,
+        input: pb::sync::SyncAuth,
+    ) -> Result<pb::sync::SyncCollectionResponse> {
         self.sync_collection_inner(input)
     }
-    fn full_upload(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
+    fn full_upload(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
         self.full_sync_inner(input, true)?;
         Ok(().into())
     }
-    fn full_download(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
+    fn full_download(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
         self.full_sync_inner(input, false)?;
         Ok(().into())
     }
-    fn sync_server_method(&self, input: pb::SyncServerMethodRequest) -> Result<pb::Json> {
+    fn sync_server_method(
+        &self,
+        input: pb::sync::SyncServerMethodRequest,
+    ) -> Result<pb::generic::Json> {
         let req = SyncRequest::from_method_and_data(input.method(), input.data)?;
         self.sync_server_method_inner(req).map(Into::into)
     }
@@ -160,7 +166,7 @@ impl Backend {
         Ok((guard, abort_reg))
     }
-    pub(super) fn sync_media_inner(&self, input: pb::SyncAuth) -> Result<()> {
+    pub(super) fn sync_media_inner(&self, input: pb::sync::SyncAuth) -> Result<()> {
         // mark media sync as active
         let (abort_handle, abort_reg) = AbortHandle::new_pair();
         {
@@ -220,7 +226,10 @@ impl Backend {
         }
     }
-    pub(super) fn sync_login_inner(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
+    pub(super) fn sync_login_inner(
+        &self,
+        input: pb::sync::SyncLoginRequest,
+    ) -> Result<pb::sync::SyncAuth> {
         let (_guard, abort_reg) = self.sync_abort_handle()?;
         let rt = self.runtime_handle();
@@ -230,16 +239,19 @@ impl Backend {
             Ok(sync_result) => sync_result,
             Err(_) => Err(AnkiError::Interrupted),
         };
-        ret.map(|a| pb::SyncAuth {
+        ret.map(|a| pb::sync::SyncAuth {
            hkey: a.hkey,
            host_number: a.host_number,
        })
    }
-    pub(super) fn sync_status_inner(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
+    pub(super) fn sync_status_inner(
+        &self,
+        input: pb::sync::SyncAuth,
+    ) -> Result<pb::sync::SyncStatusResponse> {
         // any local changes mean we can skip the network round-trip
         let req = self.with_col(|col| col.get_local_sync_status())?;
-        if req != pb::sync_status_response::Required::NoChanges {
+        if req != pb::sync::sync_status_response::Required::NoChanges {
             return Ok(req.into());
         }
@@ -273,8 +285,8 @@ impl Backend {
     pub(super) fn sync_collection_inner(
         &self,
-        input: pb::SyncAuth,
-    ) -> Result<pb::SyncCollectionResponse> {
+        input: pb::sync::SyncAuth,
+    ) -> Result<pb::sync::SyncCollectionResponse> {
         let (_guard, abort_reg) = self.sync_abort_handle()?;
         let rt = self.runtime_handle();
@@ -314,7 +326,7 @@ impl Backend {
         Ok(output.into())
     }
-    pub(super) fn full_sync_inner(&self, input: pb::SyncAuth, upload: bool) -> Result<()> {
+    pub(super) fn full_sync_inner(&self, input: pb::sync::SyncAuth, upload: bool) -> Result<()> {
         self.abort_media_sync_and_wait();
         let rt = self.runtime_handle();
@@ -356,7 +368,7 @@ impl Backend {
             .unwrap()
             .sync
             .remote_sync_status
-            .update(pb::sync_status_response::Required::NoChanges);
+            .update(pb::sync::sync_status_response::Required::NoChanges);
         }
         sync_result
     }


@@ -2,16 +2,19 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 use super::{notes::to_note_ids, Backend};
-pub(super) use crate::pb::tags_service::Service as TagsService;
+pub(super) use crate::pb::tags::tags_service::Service as TagsService;
 use crate::{pb, prelude::*};
 impl TagsService for Backend {
-    fn clear_unused_tags(&self, _input: pb::Empty) -> Result<pb::OpChangesWithCount> {
+    fn clear_unused_tags(
+        &self,
+        _input: pb::generic::Empty,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| col.clear_unused_tags().map(Into::into))
     }
-    fn all_tags(&self, _input: pb::Empty) -> Result<pb::StringList> {
-        Ok(pb::StringList {
+    fn all_tags(&self, _input: pb::generic::Empty) -> Result<pb::generic::StringList> {
+        Ok(pb::generic::StringList {
             vals: self.with_col(|col| {
                 Ok(col
                     .storage
@@ -23,22 +26,28 @@ impl TagsService for Backend {
         })
     }
-    fn remove_tags(&self, tags: pb::String) -> Result<pb::OpChangesWithCount> {
+    fn remove_tags(&self, tags: pb::generic::String) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into))
     }
-    fn set_tag_collapsed(&self, input: pb::SetTagCollapsedRequest) -> Result<pb::OpChanges> {
+    fn set_tag_collapsed(
+        &self,
+        input: pb::tags::SetTagCollapsedRequest,
+    ) -> Result<pb::collection::OpChanges> {
         self.with_col(|col| {
             col.set_tag_collapsed(&input.name, input.collapsed)
                 .map(Into::into)
         })
     }
-    fn tag_tree(&self, _input: pb::Empty) -> Result<pb::TagTreeNode> {
+    fn tag_tree(&self, _input: pb::generic::Empty) -> Result<pb::tags::TagTreeNode> {
         self.with_col(|col| col.tag_tree())
     }
-    fn reparent_tags(&self, input: pb::ReparentTagsRequest) -> Result<pb::OpChangesWithCount> {
+    fn reparent_tags(
+        &self,
+        input: pb::tags::ReparentTagsRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         let source_tags = input.tags;
         let target_tag = if input.new_parent.is_empty() {
             None
@@ -49,19 +58,28 @@ impl TagsService for Backend {
             .map(Into::into)
     }
-    fn rename_tags(&self, input: pb::RenameTagsRequest) -> Result<pb::OpChangesWithCount> {
+    fn rename_tags(
+        &self,
+        input: pb::tags::RenameTagsRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix))
             .map(Into::into)
     }
-    fn add_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
+    fn add_note_tags(
+        &self,
+        input: pb::tags::NoteIdsAndTagsRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| {
             col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags)
                 .map(Into::into)
         })
     }
-    fn remove_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
+    fn remove_note_tags(
+        &self,
+        input: pb::tags::NoteIdsAndTagsRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| {
             col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags)
                 .map(Into::into)
@@ -70,8 +88,8 @@ impl TagsService for Backend {
     fn find_and_replace_tag(
         &self,
-        input: pb::FindAndReplaceTagRequest,
-    ) -> Result<pb::OpChangesWithCount> {
+        input: pb::tags::FindAndReplaceTagRequest,
+    ) -> Result<pb::collection::OpChangesWithCount> {
         self.with_col(|col| {
             let note_ids = if input.note_ids.is_empty() {
                 col.search_notes_unordered("")?
@@ -89,10 +107,13 @@ impl TagsService for Backend {
         })
     }
-    fn complete_tag(&self, input: pb::CompleteTagRequest) -> Result<pb::CompleteTagResponse> {
+    fn complete_tag(
+        &self,
+        input: pb::tags::CompleteTagRequest,
+    ) -> Result<pb::tags::CompleteTagResponse> {
         self.with_col(|col| {
             let tags = col.complete_tag(&input.input, input.match_limit as usize)?;
-            Ok(pb::CompleteTagResponse { tags })
+            Ok(pb::tags::CompleteTagResponse { tags })
         })
     }
 }


@@ -182,8 +182,8 @@ impl Column {
             .into()
     }
-    pub fn default_order(self) -> pb::browser_columns::Sorting {
-        use pb::browser_columns::Sorting;
+    pub fn default_order(self) -> pb::search::browser_columns::Sorting {
+        use pb::search::browser_columns::Sorting;
         match self {
             Column::Question | Column::Answer | Column::Custom => Sorting::None,
             Column::SortField | Column::Tags | Column::Notetype | Column::Deck => {
@@ -205,8 +205,8 @@ impl Column {
         matches!(self, Self::Question | Self::Answer | Self::SortField)
     }
-    pub fn alignment(self) -> pb::browser_columns::Alignment {
-        use pb::browser_columns::Alignment;
+    pub fn alignment(self) -> pb::search::browser_columns::Alignment {
+        use pb::search::browser_columns::Alignment;
         match self {
             Self::Question
             | Self::Answer
@@ -221,16 +221,16 @@ impl Column {
 }
 impl Collection {
-    pub fn all_browser_columns(&self) -> pb::BrowserColumns {
-        let mut columns: Vec<pb::browser_columns::Column> = Column::iter()
+    pub fn all_browser_columns(&self) -> pb::search::BrowserColumns {
+        let mut columns: Vec<pb::search::browser_columns::Column> = Column::iter()
             .filter(|&c| c != Column::Custom)
             .map(|c| c.to_pb_column(&self.tr))
             .collect();
         columns.sort_by(|c1, c2| c1.cards_mode_label.cmp(&c2.cards_mode_label));
-        pb::BrowserColumns { columns }
+        pb::search::BrowserColumns { columns }
     }
-    pub fn browser_row_for_id(&mut self, id: i64) -> Result<pb::BrowserRow> {
+    pub fn browser_row_for_id(&mut self, id: i64) -> Result<pb::search::BrowserRow> {
         let notes_mode = self.get_config_bool(BoolKey::BrowserTableShowNotesMode);
         let columns = Arc::clone(
             self.state
@@ -361,8 +361,8 @@ impl RowContext {
         })
     }
-    fn browser_row(&self, columns: &[Column]) -> Result<pb::BrowserRow> {
-        Ok(pb::BrowserRow {
+    fn browser_row(&self, columns: &[Column]) -> Result<pb::search::BrowserRow> {
+        Ok(pb::search::BrowserRow {
             cells: columns
                 .iter()
                 .map(|&column| self.get_cell(column))
@@ -373,8 +373,8 @@ impl RowContext {
         })
     }
-    fn get_cell(&self, column: Column) -> Result<pb::browser_row::Cell> {
-        Ok(pb::browser_row::Cell {
+    fn get_cell(&self, column: Column) -> Result<pb::search::browser_row::Cell> {
+        Ok(pb::search::browser_row::Cell {
             text: self.get_cell_text(column)?,
             is_rtl: self.get_is_rtl(column),
         })
@@ -546,8 +546,8 @@ impl RowContext {
         Ok(self.template()?.config.browser_font_size)
     }
-    fn get_row_color(&self) -> pb::browser_row::Color {
-        use pb::browser_row::Color;
+    fn get_row_color(&self) -> pb::search::browser_row::Color {
+        use pb::search::browser_row::Color;
         if self.notes_mode {
             if self.note.is_marked() {
                 Color::Marked


@@ -18,7 +18,7 @@ pub fn extract_av_tags<S: Into<String> + AsRef<str>>(
     txt: S,
     question_side: bool,
     tr: &I18n,
-) -> (String, Vec<pb::AvTag>) {
+) -> (String, Vec<pb::card_rendering::AvTag>) {
     nodes_or_text_only(txt.as_ref())
         .map(|nodes| nodes.write_and_extract_av_tags(question_side, tr))
         .unwrap_or_else(|| (txt.into(), vec![]))
@@ -122,17 +122,21 @@ mod test {
             (
                 "foo [anki:play:q:0] baz [anki:play:q:1]",
                 vec![
-                    pb::AvTag {
-                        value: Some(pb::av_tag::Value::SoundOrVideo("bar.mp3".to_string()))
+                    pb::card_rendering::AvTag {
+                        value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
+                            "bar.mp3".to_string()
+                        ))
                     },
-                    pb::AvTag {
-                        value: Some(pb::av_tag::Value::Tts(pb::TtsTag {
+                    pb::card_rendering::AvTag {
+                        value: Some(pb::card_rendering::av_tag::Value::Tts(
+                            pb::card_rendering::TtsTag {
                             field_text: tr.card_templates_blank().to_string(),
                             lang: "en_US".to_string(),
                             voices: vec![],
                             speed: 1.0,
                             other_args: vec![],
-                        }))
+                            }
+                        ))
                     }
                 ],
             ),


@@ -19,7 +19,7 @@ impl<'a> CardNodes<'a> {
         &self,
         question_side: bool,
         tr: &I18n,
-    ) -> (String, Vec<pb::AvTag>) {
+    ) -> (String, Vec<pb::card_rendering::AvTag>) {
         let mut extractor = AvExtractor::new(question_side, tr);
         (extractor.write(self), extractor.tags)
     }
@@ -119,7 +119,7 @@ impl Write for AvStripper {
 struct AvExtractor<'a> {
     side: char,
-    tags: Vec<pb::AvTag>,
+    tags: Vec<pb::card_rendering::AvTag>,
     tr: &'a I18n,
 }
@@ -147,8 +147,8 @@ impl<'a> AvExtractor<'a> {
 impl Write for AvExtractor<'_> {
     fn write_sound(&mut self, buf: &mut String, resource: &str) {
         self.write_play_tag(buf);
-        self.tags.push(pb::AvTag {
-            value: Some(pb::av_tag::Value::SoundOrVideo(
+        self.tags.push(pb::card_rendering::AvTag {
+            value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
                 decode_entities(resource).into(),
             )),
         });
@@ -161,8 +161,9 @@ impl Write for AvExtractor<'_> {
         }
         self.write_play_tag(buf);
-        self.tags.push(pb::AvTag {
-            value: Some(pb::av_tag::Value::Tts(pb::TtsTag {
+        self.tags.push(pb::card_rendering::AvTag {
+            value: Some(pb::card_rendering::av_tag::Value::Tts(
+                pb::card_rendering::TtsTag {
                 field_text: self.transform_tts_content(directive),
                 lang: directive.lang.into(),
                 voices: directive.voices.iter().map(ToString::to_string).collect(),
@@ -172,7 +173,8 @@ impl Write for AvExtractor<'_> {
                 .iter()
                 .map(|(key, val)| format!("{}={}", key, val))
                 .collect(),
-            })),
+                },
+            )),
         });
     }
 }


@@ -5,7 +5,9 @@ use std::{
     ffi::OsStr,
     fs::{read_dir, remove_file, DirEntry},
     path::{Path, PathBuf},
-    thread::{self, JoinHandle},
+    thread::{
+        JoinHandle, {self},
+    },
     time::SystemTime,
 };
@@ -15,7 +17,7 @@ use log::error;
 use crate::{
     import_export::package::export_colpkg_from_data, io::read_file, log,
-    pb::preferences::BackupLimits, prelude::*,
+    pb::config::preferences::BackupLimits, prelude::*,
 };
 const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg";


@@ -18,7 +18,7 @@ pub use self::{
     bool::BoolKey, deck::DeckConfigKey, notetype::get_aux_notetype_config_key,
     number::I32ConfigKey, string::StringKey,
 };
-use crate::{pb::preferences::BackupLimits, prelude::*};
+use crate::{pb::config::preferences::BackupLimits, prelude::*};
 /// Only used when updating/undoing.
 #[derive(Debug)]


@@ -8,7 +8,7 @@ mod update;
 pub use schema11::{DeckConfSchema11, NewCardOrderSchema11};
 pub use update::UpdateDeckConfigsRequest;
-pub use crate::pb::deck_config::{
+pub use crate::pb::deckconfig::deck_config::{
     config::{
         LeechAction, NewCardGatherPriority, NewCardInsertOrder, NewCardSortOrder, ReviewCardOrder,
         ReviewMix,


@@ -13,8 +13,8 @@ use crate::{
     decks::NormalDeck,
     pb,
     pb::{
-        deck::normal::DayLimit,
-        deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck},
+        deckconfig::deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck},
+        decks::deck::normal::DayLimit,
     },
     prelude::*,
     search::{JoinSearches, SearchNode},
@@ -36,8 +36,8 @@ impl Collection {
     pub fn get_deck_configs_for_update(
         &mut self,
         deck: DeckId,
-    ) -> Result<pb::DeckConfigsForUpdate> {
-        Ok(pb::DeckConfigsForUpdate {
+    ) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
+        Ok(pb::deckconfig::DeckConfigsForUpdate {
             all_config: self.get_deck_config_with_extra_for_update()?,
             current_deck: Some(self.get_current_deck_for_update(deck)?),
             defaults: Some(DeckConfig::default().into()),


@@ -42,11 +42,11 @@ impl Collection {
     pub(crate) fn counts_for_deck_today(
         &mut self,
         did: DeckId,
-    ) -> Result<pb::CountsForDeckTodayResponse> {
+    ) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
         let today = self.current_due_day(0)?;
         let mut deck = self.storage.get_deck(did)?.or_not_found(did)?;
         deck.reset_stats_if_day_changed(today);
-        Ok(pb::CountsForDeckTodayResponse {
+        Ok(pb::scheduler::CountsForDeckTodayResponse {
             new: deck.common.new_studied,
             review: deck.common.review_studied,
         })


@@ -21,7 +21,7 @@ pub(crate) use name::immediate_parent_name;
 pub use name::NativeDeckName;
 pub use schema11::DeckSchema11;
-pub use crate::pb::{
+pub use crate::pb::decks::{
     deck::{
         filtered::{search_term::Order as FilteredSearchOrder, SearchTerm as FilteredSearchTerm},
         kind_container::Kind as DeckKind,


@@ -23,7 +23,9 @@ pub enum DeckSchema11 {
 // serde doesn't support integer/bool enum tags, so we manually pick the correct variant
 mod dynfix {
-    use serde::de::{self, Deserialize, Deserializer};
+    use serde::de::{
+        Deserialize, Deserializer, {self},
+    };
     use serde_json::{Map, Value};
     use super::{DeckSchema11, FilteredDeckSchema11, NormalDeckSchema11};


@@ -23,7 +23,7 @@ impl Collection {
         &mut self,
         today: u32,
         usn: Usn,
-        input: pb::UpdateStatsRequest,
+        input: pb::scheduler::UpdateStatsRequest,
     ) -> Result<()> {
         let did = input.deck_id.into();
         let mutator = |c: &mut DeckCommon| {


@@ -14,8 +14,10 @@ use super::{
     limits::{remaining_limits_map, RemainingLimits},
     DueCounts,
 };
-pub use crate::pb::set_deck_collapsed_request::Scope as DeckCollapseScope;
-use crate::{config::SchedulerVersion, ops::OpOutput, pb::DeckTreeNode, prelude::*, undo::Op};
+pub use crate::pb::decks::set_deck_collapsed_request::Scope as DeckCollapseScope;
+use crate::{
+    config::SchedulerVersion, ops::OpOutput, pb::decks::DeckTreeNode, prelude::*, undo::Op,
+};
 fn deck_names_to_tree(names: impl Iterator<Item = (DeckId, String)>) -> DeckTreeNode {
     let mut top = DeckTreeNode::default();


@@ -38,8 +38,8 @@ impl PartialEq for InvalidInputError {
 impl Eq for InvalidInputError {}
-/// Allows generating [AnkiError::InvalidInput] from [Option::None] and the
-/// typical [core::result::Result::Err].
+/// Allows generating [AnkiError::InvalidInput] from [None] and the
+/// typical [Err].
 pub trait OrInvalid {
     type Value;
     fn or_invalid(self, message: impl Into<String>) -> Result<Self::Value>;


@@ -35,7 +35,7 @@ impl PartialEq for NotFoundError {
 impl Eq for NotFoundError {}
-/// Allows generating [AnkiError::NotFound] from [Option::None].
+/// Allows generating [AnkiError::NotFound] from [None].
 pub trait OrNotFound {
     type Value;
     fn or_not_found(self, identifier: impl fmt::Display) -> Result<Self::Value>;
@@ -67,9 +67,6 @@ mod test {
     #[test]
     fn test_unqualified_lowercase_type_name() {
-        assert_eq!(
-            unqualified_lowercase_type_name::<crate::card::CardId>(),
-            "card id"
-        );
+        assert_eq!(unqualified_lowercase_type_name::<CardId>(), "card id");
     }
 }


@@ -8,7 +8,7 @@ pub mod text;
 use std::marker::PhantomData;
-pub use crate::pb::import_response::{Log as NoteLog, Note as LogNote};
+pub use crate::pb::import_export::import_response::{Log as NoteLog, Note as LogNote};
 use crate::{
     prelude::*,
     text::{


@@ -153,7 +153,7 @@ impl<'n> NoteContext<'n> {
     }
     fn add_notetype_with_remapped_id(&mut self, notetype: &mut Notetype) -> Result<()> {
-        let old_id = std::mem::take(&mut notetype.id);
+        let old_id = mem::take(&mut notetype.id);
         notetype.usn = self.usn;
         self.target_col
             .add_notetype_inner(notetype, self.usn, true)?;


@@ -141,7 +141,7 @@ impl Collection {
         for file in [SAMPLE_JPG, SAMPLE_JS, &new_mp3_name] {
             assert!(self.media_folder.join(file).exists());
-            assert!(*csums.get(file).unwrap() != [0; 20]);
+            assert_ne!(*csums.get(file).unwrap(), [0; 20]);
         }
         let imported_note = self.storage.get_note(note.id).unwrap().unwrap();


@@ -6,7 +6,9 @@ use std::{
     collections::HashMap,
     ffi::OsStr,
     fs::File,
-    io::{self, Read, Write},
+    io::{
+        Read, Write, {self},
+    },
     path::{Path, PathBuf},
 };
@@ -268,7 +270,7 @@ fn write_media_map(
         buf
     };
     let size = encoded_bytes.len();
-    let mut cursor = std::io::Cursor::new(encoded_bytes);
+    let mut cursor = io::Cursor::new(encoded_bytes);
     if meta.zstd_compressed() {
         zstd_copy(&mut cursor, zip, size)?;
     } else {


@@ -3,12 +3,17 @@
 use std::{
     fs::File,
-    io::{self, Write},
+    io::{
+        Write, {self},
+    },
     path::{Path, PathBuf},
 };
 use zip::{read::ZipFile, ZipArchive};
-use zstd::{self, stream::copy_decode};
+use zstd::{
+    stream::copy_decode,
+    {self},
+};
 use crate::{
     collection::CollectionBuilder,


@@ -4,7 +4,9 @@
 use std::{
     borrow::Cow,
     collections::HashMap,
-    fs::{self, File},
+    fs::{
+        File, {self},
+    },
     io,
     path::{Path, PathBuf},
 };


@@ -3,14 +3,16 @@
 use std::{
     fs::File,
-    io::{self, Read},
+    io::{
+        Read, {self},
+    },
 };
 use prost::Message;
 use zip::ZipArchive;
 use zstd::stream::copy_decode;
-pub(super) use crate::pb::{package_metadata::Version, PackageMetadata as Meta};
+pub(super) use crate::pb::import_export::{package_metadata::Version, PackageMetadata as Meta};
 use crate::{error::ImportError, prelude::*, storage::SchemaVersion};
 impl Version {
@@ -98,7 +100,7 @@ impl Meta {
     pub(super) fn copy(
         &self,
-        reader: &mut impl io::Read,
+        reader: &mut impl Read,
         writer: &mut impl io::Write,
     ) -> io::Result<()> {
         if self.zstd_compressed() {


@@ -11,4 +11,4 @@ pub(crate) use colpkg::export::export_colpkg_from_data;
 pub use colpkg::import::import_colpkg;
 pub(self) use meta::{Meta, Version};
-pub(self) use crate::pb::{media_entries::MediaEntry, MediaEntries};
+pub(self) use crate::pb::import_export::{media_entries::MediaEntry, MediaEntries};

View file

@ -11,7 +11,7 @@ use super::metadata::Delimiter;
use crate::{ use crate::{
import_export::{ExportProgress, IncrementableProgress}, import_export::{ExportProgress, IncrementableProgress},
notetype::RenderCardOutput, notetype::RenderCardOutput,
pb::ExportNoteCsvRequest, pb::import_export::ExportNoteCsvRequest,
prelude::*, prelude::*,
search::{SearchNode, SortMode}, search::{SearchNode, SortMode},
template::RenderedNode, template::RenderedNode,

View file

@ -22,7 +22,7 @@ use crate::{
import_export::text::NameOrId, import_export::text::NameOrId,
io::open_file, io::open_file,
notetype::NoteField, notetype::NoteField,
pb::StringList, pb::generic::StringList,
prelude::*, prelude::*,
text::{html_to_text_line, is_html}, text::{html_to_text_line, is_html},
}; };

View file

@ -8,7 +8,7 @@ mod json;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use super::LogNote; use super::LogNote;
use crate::pb::csv_metadata::DupeResolution; use crate::pb::import_export::csv_metadata::DupeResolution;
#[derive(Debug, Clone, Default, Serialize, Deserialize)] #[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(default)] #[serde(default)]

View file

@ -15,7 +15,7 @@ use crate::{
pub(crate) type Result<T, E = FileIoError> = std::result::Result<T, E>; pub(crate) type Result<T, E = FileIoError> = std::result::Result<T, E>;
/// See [std::fs::File::open]. /// See [File::open].
pub(crate) fn open_file(path: impl AsRef<Path>) -> Result<File> { pub(crate) fn open_file(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).context(FileIoSnafu { File::open(&path).context(FileIoSnafu {
path: path.as_ref(), path: path.as_ref(),
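For context, `open_file` wraps `File::open` so the failing path travels with the error. A rough, standalone sketch of that pattern, assuming snafu 0.7-style derived context selectors (the `FileIoError` shape here is illustrative, not the crate's actual error type):

```rust
use std::{
    fs::File,
    path::{Path, PathBuf},
};

use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
#[snafu(display("failed to open {}: {}", path.display(), source))]
struct FileIoError {
    path: PathBuf,
    source: std::io::Error,
}

// The derive generates a `FileIoSnafu` selector; `context` attaches the
// offending path to the underlying io::Error instead of discarding it.
fn open_file(path: impl AsRef<Path>) -> Result<File, FileIoError> {
    File::open(&path).context(FileIoSnafu {
        path: path.as_ref(),
    })
}
```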

View file

@ -286,7 +286,7 @@ pub(crate) fn sha1_of_file(path: &Path) -> Result<Sha1Hash, FileIoError> {
} }
/// Return the SHA1 of a stream. /// Return the SHA1 of a stream.
pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> std::io::Result<Sha1Hash> { pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result<Sha1Hash> {
let mut hasher = Sha1::new(); let mut hasher = Sha1::new();
let mut buf = [0; 64 * 1024]; let mut buf = [0; 64 * 1024];
loop { loop {
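`sha1_of_reader` hashes the stream in fixed-size chunks, so large media files never have to be held in memory; the diff above only shortens the `std::io` qualification. A self-contained sketch of the same idea, assuming the `sha1` crate with its digest-0.10 style API:

```rust
use std::io::{self, Read};

use sha1::{Digest, Sha1};

fn sha1_of_reader(reader: &mut impl Read) -> io::Result<[u8; 20]> {
    let mut hasher = Sha1::new();
    let mut buf = [0u8; 64 * 1024];
    loop {
        // Feed the hasher one buffer at a time until the reader is drained.
        let n = reader.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(hasher.finalize().into())
}

fn main() -> io::Result<()> {
    let digest = sha1_of_reader(&mut "hello world".as_bytes())?;
    println!("{digest:02x?}");
    Ok(())
}
```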

View file

@ -712,7 +712,7 @@ fn zip_files<'a>(
let buf = vec![]; let buf = vec![];
let mut invalid_entries = vec![]; let mut invalid_entries = vec![];
let w = std::io::Cursor::new(buf); let w = io::Cursor::new(buf);
let mut zip = zip::ZipWriter::new(w); let mut zip = zip::ZipWriter::new(w);
let options = let options =
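`zip_files` builds the archive into an in-memory buffer by wrapping a `Vec<u8>` in `io::Cursor`, which supplies the `Write + Seek` that `ZipWriter` needs. A simplified sketch of that setup, assuming the `zip` crate's 0.6-era API (the file name and options are illustrative):

```rust
use std::io::{Cursor, Write};

use zip::{write::FileOptions, CompressionMethod, ZipWriter};

fn zip_in_memory() -> zip::result::ZipResult<Vec<u8>> {
    // The Cursor over a Vec<u8> keeps the whole archive in memory.
    let mut zip = ZipWriter::new(Cursor::new(Vec::new()));
    let options = FileOptions::default().compression_method(CompressionMethod::Stored);
    zip.start_file("hello.txt", options)?;
    zip.write_all(b"hello")?;
    // finish() hands back the Cursor; into_inner() recovers the raw bytes.
    Ok(zip.finish()?.into_inner())
}
```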

View file

@ -18,7 +18,7 @@ use crate::{
notetype::{CardGenContext, NoteField}, notetype::{CardGenContext, NoteField},
ops::StateChanges, ops::StateChanges,
pb, pb,
pb::note_fields_check_response::State as NoteFieldsState, pb::notes::note_fields_check_response::State as NoteFieldsState,
prelude::*, prelude::*,
template::field_is_empty, template::field_is_empty,
text::{ensure_string_in_nfc, normalize_to_nfc, strip_html_preserving_media_filenames}, text::{ensure_string_in_nfc, normalize_to_nfc, strip_html_preserving_media_filenames},
@ -169,7 +169,7 @@ impl Note {
/// Prepare note for saving to the database. Does not mark it as modified. /// Prepare note for saving to the database. Does not mark it as modified.
pub(crate) fn prepare_for_update(&mut self, nt: &Notetype, normalize_text: bool) -> Result<()> { pub(crate) fn prepare_for_update(&mut self, nt: &Notetype, normalize_text: bool) -> Result<()> {
assert!(nt.id == self.notetype_id); assert_eq!(nt.id, self.notetype_id);
let notetype_field_count = nt.fields.len().max(1); let notetype_field_count = nt.fields.len().max(1);
require!( require!(
notetype_field_count == self.fields.len(), notetype_field_count == self.fields.len(),
@ -258,9 +258,9 @@ pub(crate) fn normalize_field(field: &mut String, normalize_text: bool) {
} }
} }
impl From<Note> for pb::Note { impl From<Note> for pb::notes::Note {
fn from(n: Note) -> Self { fn from(n: Note) -> Self {
pb::Note { pb::notes::Note {
id: n.id.0, id: n.id.0,
guid: n.guid, guid: n.guid,
notetype_id: n.notetype_id.0, notetype_id: n.notetype_id.0,
@ -272,8 +272,8 @@ impl From<Note> for pb::Note {
} }
} }
impl From<pb::Note> for Note { impl From<pb::notes::Note> for Note {
fn from(n: pb::Note) -> Self { fn from(n: pb::notes::Note) -> Self {
Note { Note {
id: NoteId(n.id), id: NoteId(n.id),
guid: n.guid, guid: n.guid,

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{NoteFieldConfig, NoteFieldProto}; use super::{NoteFieldConfig, NoteFieldProto};
use crate::{pb::UInt32, prelude::*}; use crate::{pb::generic::UInt32, prelude::*};
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct NoteField { pub struct NoteField {

View file

@ -30,7 +30,7 @@ pub use stock::all_stock_notetypes;
pub use templates::CardTemplate; pub use templates::CardTemplate;
use unicase::UniCase; use unicase::UniCase;
pub use crate::pb::{ pub use crate::pb::notetypes::{
notetype::{ notetype::{
config::{ config::{
card_requirement::Kind as CardRequirementKind, CardRequirement, Kind as NotetypeKind, card_requirement::Kind as CardRequirementKind, CardRequirement, Kind as NotetypeKind,

View file

@ -7,7 +7,7 @@ use crate::{
error::Result, error::Result,
i18n::I18n, i18n::I18n,
notetype::Notetype, notetype::Notetype,
pb::stock_notetype::Kind, pb::notetypes::stock_notetype::Kind,
storage::SqliteStorage, storage::SqliteStorage,
timestamp::TimestampSecs, timestamp::TimestampSecs,
}; };

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplateConfig, CardTemplateProto}; use super::{CardTemplateConfig, CardTemplateProto};
use crate::{pb::UInt32, prelude::*, template::ParsedTemplate}; use crate::{pb::generic::UInt32, prelude::*, template::ParsedTemplate};
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct CardTemplate { pub struct CardTemplate {

View file

@ -2,34 +2,29 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
macro_rules! protobuf { macro_rules! protobuf {
($ident:ident) => { ($ident:ident, $name:literal) => {
pub mod $ident { pub mod $ident {
#![allow(clippy::derive_partial_eq_without_eq)] include!(concat!(env!("OUT_DIR"), "/anki.", $name, ".rs"));
include!(concat!(
env!("OUT_DIR"),
concat!("/anki.", stringify!($ident), ".rs")
));
} }
pub use $ident::*;
}; };
} }
protobuf!(backend); protobuf!(backend, "backend");
protobuf!(card_rendering); protobuf!(card_rendering, "card_rendering");
protobuf!(cards); protobuf!(cards, "cards");
protobuf!(collection); protobuf!(collection, "collection");
protobuf!(config); protobuf!(config, "config");
protobuf!(deckconfig); protobuf!(deckconfig, "deckconfig");
protobuf!(decks); protobuf!(decks, "decks");
protobuf!(generic); protobuf!(generic, "generic");
protobuf!(i18n); protobuf!(i18n, "i18n");
protobuf!(import_export); protobuf!(import_export, "import_export");
protobuf!(links); protobuf!(links, "links");
protobuf!(media); protobuf!(media, "media");
protobuf!(notes); protobuf!(notes, "notes");
protobuf!(notetypes); protobuf!(notetypes, "notetypes");
protobuf!(scheduler); protobuf!(scheduler, "scheduler");
protobuf!(search); protobuf!(search, "search");
protobuf!(stats); protobuf!(stats, "stats");
protobuf!(sync); protobuf!(sync, "sync");
protobuf!(tags); protobuf!(tags, "tags");
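The reworked `protobuf!` macro takes the generated file's package name as a string literal instead of deriving it with `stringify!`, and the blanket `pub use $ident::*;` re-export is gone, so generated types are now addressed through their package module (for example `pb::scheduler::CongratsInfoResponse` rather than the flat `pb::CongratsInfoResponse`), as the other hunks in this diff show. A minimal, self-contained sketch of the literal-based path construction (the path is illustrative; the real macro `include!`s prost-build output from `OUT_DIR`):

```rust
// With a string literal, concat! builds the include path directly at compile
// time, with no stringify!($ident) expansion step in between.
macro_rules! generated_path {
    ($name:literal) => {
        concat!("/anki.", $name, ".rs")
    };
}

fn main() {
    assert_eq!(generated_path!("scheduler"), "/anki.scheduler.rs");
}
```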

View file

@ -5,7 +5,7 @@ use crate::{
collection::Collection, collection::Collection,
config::{BoolKey, StringKey}, config::{BoolKey, StringKey},
error::Result, error::Result,
pb::{ pb::config::{
preferences::{scheduling::NewReviewMix as NewRevMixPB, Editing, Reviewing, Scheduling}, preferences::{scheduling::NewReviewMix as NewRevMixPB, Editing, Reviewing, Scheduling},
Preferences, Preferences,
}, },

View file

@ -330,7 +330,7 @@ impl Collection {
self.update_deck_stats( self.update_deck_stats(
updater.timing.days_elapsed, updater.timing.days_elapsed,
usn, usn,
pb::UpdateStatsRequest { pb::scheduler::UpdateStatsRequest {
deck_id: updater.deck.id.0, deck_id: updater.deck.id.0,
new_delta, new_delta,
review_delta, review_delta,

View file

@ -5,7 +5,7 @@ use super::timing::SchedTimingToday;
use crate::{ use crate::{
card::CardQueue, card::CardQueue,
config::SchedulerVersion, config::SchedulerVersion,
pb::{ pb::scheduler::{
bury_or_suspend_cards_request::Mode as BuryOrSuspendMode, bury_or_suspend_cards_request::Mode as BuryOrSuspendMode,
unbury_deck_request::Mode as UnburyDeckMode, unbury_deck_request::Mode as UnburyDeckMode,
}, },

View file

@ -14,7 +14,7 @@ pub(crate) struct CongratsInfo {
} }
impl Collection { impl Collection {
pub fn congrats_info(&mut self) -> Result<pb::CongratsInfoResponse> { pub fn congrats_info(&mut self) -> Result<pb::scheduler::CongratsInfoResponse> {
let deck = self.get_current_deck()?; let deck = self.get_current_deck()?;
let today = self.timing_today()?.days_elapsed; let today = self.timing_today()?.days_elapsed;
let info = self.storage.congrats_info(&deck, today)?; let info = self.storage.congrats_info(&deck, today)?;
@ -27,7 +27,7 @@ impl Collection {
((info.next_learn_due as i64) - self.learn_ahead_secs() as i64 - TimestampSecs::now().0) ((info.next_learn_due as i64) - self.learn_ahead_secs() as i64 - TimestampSecs::now().0)
.max(60) as u32 .max(60) as u32
}; };
Ok(pb::CongratsInfoResponse { Ok(pb::scheduler::CongratsInfoResponse {
learn_remaining: info.learn_count, learn_remaining: info.learn_count,
review_remaining: info.review_remaining, review_remaining: info.review_remaining,
new_remaining: info.new_remaining, new_remaining: info.new_remaining,
@ -51,7 +51,7 @@ mod test {
let info = col.congrats_info().unwrap(); let info = col.congrats_info().unwrap();
assert_eq!( assert_eq!(
info, info,
crate::pb::CongratsInfoResponse { crate::pb::scheduler::CongratsInfoResponse {
learn_remaining: 0, learn_remaining: 0,
review_remaining: false, review_remaining: false,
new_remaining: false, new_remaining: false,

View file

@ -9,22 +9,25 @@ use crate::{
decks::{FilteredDeck, FilteredSearchOrder, FilteredSearchTerm}, decks::{FilteredDeck, FilteredSearchOrder, FilteredSearchTerm},
error::{CustomStudyError, FilteredDeckError}, error::{CustomStudyError, FilteredDeckError},
pb::{ pb::{
self as pb, scheduler::custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue},
custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue}, {self as pb},
}, },
prelude::*, prelude::*,
search::{JoinSearches, Negated, PropertyKind, RatingKind, SearchNode, StateKind}, search::{JoinSearches, Negated, PropertyKind, RatingKind, SearchNode, StateKind},
}; };
impl Collection { impl Collection {
pub fn custom_study(&mut self, input: pb::CustomStudyRequest) -> Result<OpOutput<()>> { pub fn custom_study(
&mut self,
input: pb::scheduler::CustomStudyRequest,
) -> Result<OpOutput<()>> {
self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input)) self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input))
} }
pub fn custom_study_defaults( pub fn custom_study_defaults(
&mut self, &mut self,
deck_id: DeckId, deck_id: DeckId,
) -> Result<pb::CustomStudyDefaultsResponse> { ) -> Result<pb::scheduler::CustomStudyDefaultsResponse> {
// daily counts // daily counts
let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?; let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?;
let normal = deck.normal()?; let normal = deck.normal()?;
@ -70,11 +73,11 @@ impl Collection {
); );
let mut all_tags: Vec<_> = self.all_tags_in_deck(deck_id)?.into_iter().collect(); let mut all_tags: Vec<_> = self.all_tags_in_deck(deck_id)?.into_iter().collect();
all_tags.sort_unstable(); all_tags.sort_unstable();
let tags: Vec<pb::custom_study_defaults_response::Tag> = all_tags let tags: Vec<pb::scheduler::custom_study_defaults_response::Tag> = all_tags
.into_iter() .into_iter()
.map(|tag| { .map(|tag| {
let tag = tag.into_inner(); let tag = tag.into_inner();
pb::custom_study_defaults_response::Tag { pb::scheduler::custom_study_defaults_response::Tag {
include: include_tags.contains(&tag), include: include_tags.contains(&tag),
exclude: exclude_tags.contains(&tag), exclude: exclude_tags.contains(&tag),
name: tag, name: tag,
@ -82,7 +85,7 @@ impl Collection {
}) })
.collect(); .collect();
Ok(pb::CustomStudyDefaultsResponse { Ok(pb::scheduler::CustomStudyDefaultsResponse {
tags, tags,
extend_new, extend_new,
extend_review, extend_review,
@ -95,7 +98,7 @@ impl Collection {
} }
impl Collection { impl Collection {
fn custom_study_inner(&mut self, input: pb::CustomStudyRequest) -> Result<()> { fn custom_study_inner(&mut self, input: pb::scheduler::CustomStudyRequest) -> Result<()> {
let mut deck = self let mut deck = self
.storage .storage
.get_deck(input.deck_id.into())? .get_deck(input.deck_id.into())?
@ -292,8 +295,8 @@ mod test {
use super::*; use super::*;
use crate::{ use crate::{
collection::open_test_collection, collection::open_test_collection,
pb::{ pb::scheduler::{
scheduler::custom_study_request::{cram::CramKind, Cram, Value}, custom_study_request::{cram::CramKind, Cram, Value},
CustomStudyRequest, CustomStudyRequest,
}, },
}; };

View file

@ -266,7 +266,7 @@ mod test {
use crate::{ use crate::{
card::{CardQueue, CardType}, card::{CardQueue, CardType},
collection::open_test_collection, collection::open_test_collection,
pb::deck_config::config::{NewCardGatherPriority, NewCardSortOrder}, pb::deckconfig::deck_config::config::{NewCardGatherPriority, NewCardSortOrder},
}; };
impl Collection { impl Collection {

View file

@ -737,8 +737,8 @@ mod test {
use Node::*; use Node::*;
use SearchNode::*; use SearchNode::*;
assert_eq!(parse("")?, vec![Search(SearchNode::WholeCollection)]); assert_eq!(parse("")?, vec![Search(WholeCollection)]);
assert_eq!(parse(" ")?, vec![Search(SearchNode::WholeCollection)]); assert_eq!(parse(" ")?, vec![Search(WholeCollection)]);
// leading/trailing/interspersed whitespace // leading/trailing/interspersed whitespace
assert_eq!( assert_eq!(

View file

@ -14,7 +14,7 @@ use crate::timestamp::TimestampSecs;
pub(crate) fn default_on_invalid<'de, T, D>(deserializer: D) -> Result<T, D::Error> pub(crate) fn default_on_invalid<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where where
T: Default + DeTrait<'de>, T: Default + DeTrait<'de>,
D: serde::de::Deserializer<'de>, D: Deserializer<'de>,
{ {
let v: Value = DeTrait::deserialize(deserializer)?; let v: Value = DeTrait::deserialize(deserializer)?;
Ok(T::deserialize(v).unwrap_or_default()) Ok(T::deserialize(v).unwrap_or_default())
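`default_on_invalid` deserializes into a self-describing `Value` first and falls back to `T::default()` when that value does not fit, so one malformed field cannot sink the whole structure. A standalone sketch of the pattern, assuming serde (with derive) and serde_json are available; the `Config`/`retries` names are made up:

```rust
use serde::{Deserialize, Deserializer};
use serde_json::Value;

fn default_on_invalid<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: Default + Deserialize<'de>,
    D: Deserializer<'de>,
{
    // Accept any JSON value, then substitute T::default() if it does not
    // match the expected shape instead of failing the whole deserialization.
    let v: Value = Deserialize::deserialize(deserializer)?;
    Ok(T::deserialize(v).unwrap_or_default())
}

#[derive(Deserialize, Debug)]
struct Config {
    #[serde(deserialize_with = "default_on_invalid")]
    retries: u32,
}

fn main() {
    let c: Config = serde_json::from_str(r#"{ "retries": "oops" }"#).unwrap();
    assert_eq!(c.retries, 0); // invalid value replaced by the default
}
```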

View file

@ -9,7 +9,7 @@ use crate::{
}; };
impl Collection { impl Collection {
pub fn card_stats(&mut self, cid: CardId) -> Result<pb::CardStatsResponse> { pub fn card_stats(&mut self, cid: CardId) -> Result<pb::stats::CardStatsResponse> {
let card = self.storage.get_card(cid)?.or_not_found(cid)?; let card = self.storage.get_card(cid)?.or_not_found(cid)?;
let note = self let note = self
.storage .storage
@ -27,7 +27,7 @@ impl Collection {
let (average_secs, total_secs) = average_and_total_secs_strings(&revlog); let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
let (due_date, due_position) = self.due_date_and_position(&card)?; let (due_date, due_position) = self.due_date_and_position(&card)?;
Ok(pb::CardStatsResponse { Ok(pb::stats::CardStatsResponse {
card_id: card.id.into(), card_id: card.id.into(),
note_id: card.note_id.into(), note_id: card.note_id.into(),
deck: deck.human_name(), deck: deck.human_name(),
@ -92,8 +92,8 @@ fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
} }
} }
fn stats_revlog_entry(entry: &RevlogEntry) -> pb::card_stats_response::StatsRevlogEntry { fn stats_revlog_entry(entry: &RevlogEntry) -> pb::stats::card_stats_response::StatsRevlogEntry {
pb::card_stats_response::StatsRevlogEntry { pb::stats::card_stats_response::StatsRevlogEntry {
time: entry.id.as_secs().0, time: entry.id.as_secs().0,
review_kind: entry.review_kind.into(), review_kind: entry.review_kind.into(),
button_chosen: entry.button_chosen as u32, button_chosen: entry.button_chosen as u32,

View file

@ -14,13 +14,13 @@ impl Collection {
&mut self, &mut self,
search: &str, search: &str,
days: u32, days: u32,
) -> Result<pb::GraphsResponse> { ) -> Result<pb::stats::GraphsResponse> {
let guard = self.search_cards_into_table(search, SortMode::NoOrder)?; let guard = self.search_cards_into_table(search, SortMode::NoOrder)?;
let all = search.trim().is_empty(); let all = search.trim().is_empty();
guard.col.graph_data(all, days) guard.col.graph_data(all, days)
} }
fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::GraphsResponse> { fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::stats::GraphsResponse> {
let timing = self.timing_today()?; let timing = self.timing_today()?;
let revlog_start = if days > 0 { let revlog_start = if days > 0 {
timing timing
@ -41,7 +41,7 @@ impl Collection {
.get_pb_revlog_entries_for_searched_cards(revlog_start)? .get_pb_revlog_entries_for_searched_cards(revlog_start)?
}; };
Ok(pb::GraphsResponse { Ok(pb::stats::GraphsResponse {
cards: cards.into_iter().map(Into::into).collect(), cards: cards.into_iter().map(Into::into).collect(),
revlog, revlog,
days_elapsed: timing.days_elapsed, days_elapsed: timing.days_elapsed,
@ -51,8 +51,8 @@ impl Collection {
}) })
} }
pub(crate) fn get_graph_preferences(&self) -> pb::GraphPreferences { pub(crate) fn get_graph_preferences(&self) -> pb::stats::GraphPreferences {
pb::GraphPreferences { pb::stats::GraphPreferences {
calendar_first_day_of_week: self.get_first_day_of_week() as i32, calendar_first_day_of_week: self.get_first_day_of_week() as i32,
card_counts_separate_inactive: self card_counts_separate_inactive: self
.get_config_bool(BoolKey::CardCountsSeparateInactive), .get_config_bool(BoolKey::CardCountsSeparateInactive),
@ -61,7 +61,10 @@ impl Collection {
} }
} }
pub(crate) fn set_graph_preferences(&mut self, prefs: pb::GraphPreferences) -> Result<()> { pub(crate) fn set_graph_preferences(
&mut self,
prefs: pb::stats::GraphPreferences,
) -> Result<()> {
self.set_first_day_of_week(match prefs.calendar_first_day_of_week { self.set_first_day_of_week(match prefs.calendar_first_day_of_week {
1 => Weekday::Monday, 1 => Weekday::Monday,
5 => Weekday::Friday, 5 => Weekday::Friday,
@ -77,9 +80,9 @@ impl Collection {
} }
} }
impl From<RevlogEntry> for pb::RevlogEntry { impl From<RevlogEntry> for pb::stats::RevlogEntry {
fn from(e: RevlogEntry) -> Self { fn from(e: RevlogEntry) -> Self {
pb::RevlogEntry { pb::stats::RevlogEntry {
id: e.id.0, id: e.id.0,
cid: e.cid.0, cid: e.cid.0,
usn: e.usn.0, usn: e.usn.0,

View file

@ -29,7 +29,7 @@ use crate::{
}; };
impl FromSql for CardType { impl FromSql for CardType {
fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> { fn column_result(value: ValueRef<'_>) -> result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value { if let ValueRef::Integer(i) = value {
Ok(Self::try_from(i as u8).map_err(|_| FromSqlError::InvalidType)?) Ok(Self::try_from(i as u8).map_err(|_| FromSqlError::InvalidType)?)
} else { } else {
@ -39,7 +39,7 @@ impl FromSql for CardType {
} }
impl FromSql for CardQueue { impl FromSql for CardQueue {
fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> { fn column_result(value: ValueRef<'_>) -> result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value { if let ValueRef::Integer(i) = value {
Ok(Self::try_from(i as i8).map_err(|_| FromSqlError::InvalidType)?) Ok(Self::try_from(i as i8).map_err(|_| FromSqlError::InvalidType)?)
} else { } else {
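These `FromSql` impls turn integer columns into enums via `TryFrom`, mapping anything out of range to `FromSqlError::InvalidType`; the hunk above only shortens the `std::result` qualification. A rough, standalone sketch of the pattern, assuming the `rusqlite` crate (the enum and its variants are illustrative):

```rust
use rusqlite::types::{FromSql, FromSqlError, FromSqlResult, ValueRef};

#[derive(Debug)]
enum CardQueue {
    New = 0,
    Learning = 1,
    Review = 2,
}

impl TryFrom<i8> for CardQueue {
    type Error = ();
    fn try_from(v: i8) -> Result<Self, Self::Error> {
        match v {
            0 => Ok(Self::New),
            1 => Ok(Self::Learning),
            2 => Ok(Self::Review),
            _ => Err(()),
        }
    }
}

impl FromSql for CardQueue {
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        // Only integer columns are acceptable; unknown discriminants are
        // reported as an invalid type rather than panicking.
        if let ValueRef::Integer(i) = value {
            Self::try_from(i as i8).map_err(|_| FromSqlError::InvalidType)
        } else {
            Err(FromSqlError::InvalidType)
        }
    }
}

fn main() {
    let q = CardQueue::column_result(ValueRef::Integer(1)).unwrap();
    println!("{q:?}");
}
```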

View file

@ -153,7 +153,7 @@ impl SqliteStorage {
// caller should ensure name unique // caller should ensure name unique
pub(crate) fn add_deck(&self, deck: &mut Deck) -> Result<()> { pub(crate) fn add_deck(&self, deck: &mut Deck) -> Result<()> {
assert!(deck.id.0 == 0); assert_eq!(deck.id.0, 0);
deck.id.0 = self deck.id.0 = self
.db .db
.prepare(include_str!("alloc_id.sql"))? .prepare(include_str!("alloc_id.sql"))?

View file

@ -51,7 +51,7 @@ impl super::SqliteStorage {
/// If fields have been modified, caller must call note.prepare_for_update() prior to calling this. /// If fields have been modified, caller must call note.prepare_for_update() prior to calling this.
pub(crate) fn update_note(&self, note: &Note) -> Result<()> { pub(crate) fn update_note(&self, note: &Note) -> Result<()> {
assert!(note.id.0 != 0); assert_ne!(note.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("update.sql"))?; let mut stmt = self.db.prepare_cached(include_str!("update.sql"))?;
stmt.execute(params![ stmt.execute(params![
note.guid, note.guid,
@ -68,7 +68,7 @@ impl super::SqliteStorage {
} }
pub(crate) fn add_note(&self, note: &mut Note) -> Result<()> { pub(crate) fn add_note(&self, note: &mut Note) -> Result<()> {
assert!(note.id.0 == 0); assert_eq!(note.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("add.sql"))?; let mut stmt = self.db.prepare_cached(include_str!("add.sql"))?;
stmt.execute(params![ stmt.execute(params![
TimestampMillis::now(), TimestampMillis::now(),

View file

@ -226,7 +226,7 @@ impl SqliteStorage {
} }
pub(crate) fn add_notetype(&self, nt: &mut Notetype) -> Result<()> { pub(crate) fn add_notetype(&self, nt: &mut Notetype) -> Result<()> {
assert!(nt.id.0 == 0); assert_eq!(nt.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("add_notetype.sql"))?; let mut stmt = self.db.prepare_cached(include_str!("add_notetype.sql"))?;
let mut config_bytes = vec![]; let mut config_bytes = vec![];

View file

@ -113,7 +113,7 @@ impl SqliteStorage {
pub(crate) fn get_pb_revlog_entries_for_searched_cards( pub(crate) fn get_pb_revlog_entries_for_searched_cards(
&self, &self,
after: TimestampSecs, after: TimestampSecs,
) -> Result<Vec<pb::RevlogEntry>> { ) -> Result<Vec<pb::stats::RevlogEntry>> {
self.db self.db
.prepare_cached(concat!( .prepare_cached(concat!(
include_str!("get.sql"), include_str!("get.sql"),
@ -137,7 +137,7 @@ impl SqliteStorage {
pub(crate) fn get_all_revlog_entries( pub(crate) fn get_all_revlog_entries(
&self, &self,
after: TimestampSecs, after: TimestampSecs,
) -> Result<Vec<pb::RevlogEntry>> { ) -> Result<Vec<pb::stats::RevlogEntry>> {
self.db self.db
.prepare_cached(concat!(include_str!("get.sql"), " where id >= ?"))? .prepare_cached(concat!(include_str!("get.sql"), " where id >= ?"))?
.query_and_then([after.0 * 1000], |r| row_to_revlog_entry(r).map(Into::into))? .query_and_then([after.0 * 1000], |r| row_to_revlog_entry(r).map(Into::into))?

View file

@ -81,8 +81,8 @@ mod test {
#[test] #[test]
#[allow(clippy::assertions_on_constants)] #[allow(clippy::assertions_on_constants)]
fn assert_18_is_latest_schema_version() { fn assert_18_is_latest_schema_version() {
assert!( assert_eq!(
18 == SCHEMA_MAX_VERSION, 18, SCHEMA_MAX_VERSION,
"must implement SqliteStorage::downgrade_to(SchemaVersion::V18)" "must implement SqliteStorage::downgrade_to(SchemaVersion::V18)"
); );
} }

View file

@ -6,7 +6,7 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use super::{Chunk, Graves, SanityCheckCounts, UnchunkedChanges}; use super::{Chunk, Graves, SanityCheckCounts, UnchunkedChanges};
use crate::{io::read_file, pb::sync_server_method_request::Method, prelude::*}; use crate::{io::read_file, pb::sync::sync_server_method_request::Method, prelude::*};
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub enum SyncRequest { pub enum SyncRequest {

View file

@ -65,7 +65,7 @@ pub struct Timeouts {
impl Timeouts { impl Timeouts {
pub fn new() -> Self { pub fn new() -> Self {
let io_secs = if std::env::var("LONG_IO_TIMEOUT").is_ok() { let io_secs = if env::var("LONG_IO_TIMEOUT").is_ok() {
3600 3600
} else { } else {
300 300
@ -314,7 +314,7 @@ impl HttpSyncClient {
usize, usize,
impl Stream<Item = std::result::Result<Bytes, reqwest::Error>>, impl Stream<Item = std::result::Result<Bytes, reqwest::Error>>,
)> { )> {
let resp: reqwest::Response = self.request_bytes("download", b"{}", true).await?; let resp: Response = self.request_bytes("download", b"{}", true).await?;
let len = resp.content_length().unwrap_or_default(); let len = resp.content_length().unwrap_or_default();
Ok((len as usize, resp.bytes_stream())) Ok((len as usize, resp.bytes_stream()))
} }
@ -379,7 +379,7 @@ where
} }
fn sync_endpoint(host_number: u32) -> String { fn sync_endpoint(host_number: u32) -> String {
if let Ok(endpoint) = std::env::var("SYNC_ENDPOINT") { if let Ok(endpoint) = env::var("SYNC_ENDPOINT") {
endpoint endpoint
} else { } else {
let suffix = if host_number > 0 { let suffix = if host_number > 0 {
@ -484,13 +484,13 @@ mod test {
#[test] #[test]
fn http_client() -> Result<()> { fn http_client() -> Result<()> {
let user = match std::env::var("TEST_SYNC_USER") { let user = match env::var("TEST_SYNC_USER") {
Ok(s) => s, Ok(s) => s,
Err(_) => { Err(_) => {
return Ok(()); return Ok(());
} }
}; };
let pass = std::env::var("TEST_SYNC_PASS").unwrap(); let pass = env::var("TEST_SYNC_PASS").unwrap();
env_logger::init(); env_logger::init();
let rt = Runtime::new().unwrap(); let rt = Runtime::new().unwrap();

View file

@ -23,7 +23,7 @@ use crate::{
io::atomic_rename, io::atomic_rename,
notes::Note, notes::Note,
notetype::{Notetype, NotetypeSchema11}, notetype::{Notetype, NotetypeSchema11},
pb::{sync_status_response, SyncStatusResponse}, pb::sync::{sync_status_response, SyncStatusResponse},
prelude::*, prelude::*,
revlog::RevlogEntry, revlog::RevlogEntry,
serde::{default_on_invalid, deserialize_int_from_number}, serde::{default_on_invalid, deserialize_int_from_number},

View file

@ -6,7 +6,7 @@ use std::{collections::HashSet, iter::Peekable};
use unicase::UniCase; use unicase::UniCase;
use super::{immediate_parent_name_unicase, Tag}; use super::{immediate_parent_name_unicase, Tag};
use crate::{pb::TagTreeNode, prelude::*}; use crate::{pb::tags::TagTreeNode, prelude::*};
impl Collection { impl Collection {
pub fn tag_tree(&mut self) -> Result<TagTreeNode> { pub fn tag_tree(&mut self) -> Result<TagTreeNode> {

View file

@ -78,7 +78,7 @@ fn tokens<'a>(template: &'a str) -> Box<dyn Iterator<Item = TemplateResult<Token
} }
fn new_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> { fn new_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> {
std::iter::from_fn(move || { iter::from_fn(move || {
if data.is_empty() { if data.is_empty() {
return None; return None;
} }
@ -158,7 +158,7 @@ fn alternate_handlebar_token(s: &str) -> nom::IResult<&str, Token> {
} }
fn legacy_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> { fn legacy_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> {
std::iter::from_fn(move || { iter::from_fn(move || {
if data.is_empty() { if data.is_empty() {
return None; return None;
} }
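The tokenizer change only drops the `std::` prefix in favour of an imported `iter` module. The underlying pattern, an iterator built with `iter::from_fn` over a shrinking slice, looks roughly like this simplified sketch (whitespace splitting stands in for the real template lexing):

```rust
use std::iter;

fn whitespace_tokens(mut data: &str) -> impl Iterator<Item = &str> {
    // The closure owns the slice and trims it on every call, yielding one
    // token per invocation until nothing remains.
    iter::from_fn(move || {
        if data.is_empty() {
            return None;
        }
        let end = data.find(char::is_whitespace).unwrap_or(data.len());
        let (token, rest) = data.split_at(end);
        data = rest.trim_start();
        Some(token)
    })
}

fn main() {
    let tokens: Vec<_> = whitespace_tokens("a b  c").collect();
    assert_eq!(tokens, ["a", "b", "c"]);
}
```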

Some files were not shown because too many files have changed in this diff.