Move generated protobuf into anki_proto

Due to the orphan rule, this meant removing our uses of `impl ProtoStruct`,
or converting them to traits where they were commonly used.

rslib now directly references anki_proto and anki_i18n, instead of
'pub use'-ing them, and we can put the generated files back in OUT_DIR.
This commit is contained in:
Damien Elmes 2023-06-12 11:40:10 +10:00
parent 651ba88393
commit a83c4a7da7
135 changed files with 1801 additions and 1595 deletions

24
Cargo.lock generated
View file

@ -130,7 +130,6 @@ dependencies = [
"scopeguard",
"serde",
"serde-aux",
"serde_derive",
"serde_json",
"serde_repr",
"serde_tuple",
@ -191,9 +190,14 @@ version = "0.0.0"
dependencies = [
"anyhow",
"inflections",
"num_enum",
"prost",
"prost-build",
"prost-reflect",
"prost-types",
"serde",
"snafu",
"strum",
]
[[package]]
@ -2497,23 +2501,23 @@ dependencies = [
[[package]]
name = "num_enum"
version = "0.5.11"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
dependencies = [
"num_enum_derive",
]
[[package]]
name = "num_enum_derive"
version = "0.5.11"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799"
checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.12",
]
[[package]]
@ -3557,9 +3561,9 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "serde"
version = "1.0.159"
version = "1.0.164"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
dependencies = [
"serde_derive",
]
@ -3577,9 +3581,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.159"
version = "1.0.164"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
dependencies = [
"proc-macro2",
"quote",

View file

@ -17,6 +17,7 @@ service CardsService {
rpc SetDeck(SetDeckRequest) returns (collection.OpChangesWithCount);
rpc SetFlag(SetFlagRequest) returns (collection.OpChangesWithCount);
}
// Wrapper message for a single card id, so service methods can accept or
// return a bare id without defining per-method request types.
message CardId {
    int64 cid = 1;
}

View file

@ -80,7 +80,7 @@ lazy_static = "1.4.0"
nom = "7.1.3"
num-integer = "0.1.45"
num_cpus = "1.15.0"
num_enum = "0.5.11"
num_enum = "0.6.1"
once_cell = "1.17.1"
pin-project = "1.0.12"
prost = "0.11.8"
@ -89,9 +89,8 @@ rand = "0.8.5"
regex = "1.7.3"
rusqlite = { version = "0.29.0", features = ["trace", "functions", "collation", "bundled"] }
scopeguard = "1.1.0"
serde = "1.0.159"
serde = { version = "1.0.159", features = ["derive"] }
serde-aux = "4.1.2"
serde_derive = "1.0.159"
serde_json = "1.0.95"
serde_repr = "0.1.12"
serde_tuple = "0.5.0"

View file

@ -7,6 +7,8 @@ use std::borrow::Cow;
use std::env;
use std::iter;
use anki::links::help_page_link_suffix;
use anki::links::help_page_to_link;
use anki::links::HelpPage;
use futures::StreamExt;
use itertools::Itertools;
@ -38,14 +40,14 @@ enum CheckableUrl {
impl CheckableUrl {
fn url(&self) -> Cow<str> {
match *self {
Self::HelpPage(page) => page.to_link().into(),
Self::HelpPage(page) => help_page_to_link(page).into(),
Self::String(s) => s.into(),
}
}
fn anchor(&self) -> Cow<str> {
match *self {
Self::HelpPage(page) => page.to_link_suffix().into(),
Self::HelpPage(page) => help_page_link_suffix(page).into(),
Self::String(s) => s.split('#').last().unwrap_or_default().into(),
}
}

View file

@ -15,3 +15,10 @@ inflections = "1.1.1"
prost-build = "0.11.9"
prost-reflect = "0.11.4"
prost-types = "0.11.9"
[dependencies]
num_enum = "0.6.1"
prost = "0.11.9"
serde = { version = "1.0.164", features = ["derive"] }
snafu = "0.7.4"
strum = { version = "0.24.1", features = ["derive"] }

View file

@ -16,11 +16,16 @@ pub fn write_backend_proto_rs(descriptors_path: &Path) -> Result<DescriptorPool>
set_protoc_path();
let proto_dir = PathBuf::from("../../proto");
let paths = gather_proto_paths(&proto_dir)?;
let out_dir = Path::new("../../out/rslib/proto");
fs::create_dir_all(out_dir).with_context(|| format!("{:?}", out_dir))?;
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
fs::create_dir_all(
descriptors_path
.parent()
.context("no parent found for descriptors path")?,
)
.with_context(|| format!("creating {descriptors_path:?}"))?;
prost_build::Config::new()
.out_dir(out_dir)
.out_dir(&out_dir)
.file_descriptor_set_path(descriptors_path)
.service_generator(RustCodeGenerator::boxed())
.type_attribute(
@ -29,26 +34,26 @@ pub fn write_backend_proto_rs(descriptors_path: &Path) -> Result<DescriptorPool>
)
.type_attribute(
"Deck.Normal.DayLimit",
"#[derive(Copy, Eq, serde_derive::Deserialize, serde_derive::Serialize)]",
"#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]",
)
.type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]")
.type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]")
.type_attribute(
"Preferences.BackupLimits",
"#[derive(Copy, serde_derive::Deserialize, serde_derive::Serialize)]",
"#[derive(Copy, serde::Deserialize, serde::Serialize)]",
)
.type_attribute(
"CsvMetadata.DupeResolution",
"#[derive(serde_derive::Deserialize, serde_derive::Serialize)]",
"#[derive(serde::Deserialize, serde::Serialize)]",
)
.type_attribute(
"CsvMetadata.MatchScope",
"#[derive(serde_derive::Deserialize, serde_derive::Serialize)]",
"#[derive(serde::Deserialize, serde::Serialize)]",
)
.compile_protos(paths.as_slice(), &[proto_dir])
.context("prost build")?;
write_service_index(out_dir, descriptors_path)
write_service_index(&out_dir, descriptors_path)
}
fn write_service_index(out_dir: &Path, descriptors_path: &Path) -> Result<DescriptorPool> {
@ -115,7 +120,9 @@ impl RustCodeGenerator {
buf.push_str(
r#"
pub trait Service {
fn run_method(&self, method: u32, input: &[u8]) -> Result<Vec<u8>> {
type Error: From<crate::ProtoError>;
fn run_method(&self, method: u32, input: &[u8]) -> Result<Vec<u8>, Self::Error> {
match method {
"#,
);
@ -123,9 +130,9 @@ pub trait Service {
write!(
buf,
concat!(" ",
"{idx} => {{ let input = super::{input_type}::decode(input)?;\n",
"{idx} => {{ let input = super::{input_type}::decode(input).map_err(crate::ProtoError::from)?;\n",
"let output = self.{rust_method}(input)?;\n",
"let mut out_bytes = Vec::new(); output.encode(&mut out_bytes)?; Ok(out_bytes) }}, "),
"let mut out_bytes = Vec::new(); output.encode(&mut out_bytes).map_err(crate::ProtoError::from)?; Ok(out_bytes) }}, "),
idx = idx,
input_type = method.input_type,
rust_method = method.name
@ -134,7 +141,7 @@ pub trait Service {
}
buf.push_str(
r#"
_ => crate::invalid_input!("invalid command"),
_ => Err(crate::ProtoError::InvalidMethodIndex.into()),
}
}
"#,
@ -145,7 +152,7 @@ pub trait Service {
buf,
concat!(
" fn {method_name}(&self, input: super::{input_type}) -> ",
"Result<super::{output_type}>;\n"
"Result<super::{output_type}, Self::Error>;\n"
),
method_name = method.name,
input_type = method.input_type,
@ -163,7 +170,6 @@ impl ServiceGenerator for RustCodeGenerator {
buf,
"pub mod {name}_service {{
use prost::Message;
use crate::error::Result;
",
name = service.name.replace("Service", "").to_ascii_lowercase()
)

View file

@ -0,0 +1,56 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
impl From<Vec<u8>> for crate::generic::Json {
fn from(json: Vec<u8>) -> Self {
crate::generic::Json { json }
}
}
impl From<String> for crate::generic::String {
fn from(val: String) -> Self {
crate::generic::String { val }
}
}
impl From<Vec<String>> for crate::generic::StringList {
fn from(vals: Vec<String>) -> Self {
crate::generic::StringList { vals }
}
}
impl From<bool> for crate::generic::Bool {
fn from(val: bool) -> Self {
crate::generic::Bool { val }
}
}
impl From<i32> for crate::generic::Int32 {
fn from(val: i32) -> Self {
crate::generic::Int32 { val }
}
}
impl From<i64> for crate::generic::Int64 {
fn from(val: i64) -> Self {
crate::generic::Int64 { val }
}
}
impl From<u32> for crate::generic::UInt32 {
fn from(val: u32) -> Self {
crate::generic::UInt32 { val }
}
}
impl From<usize> for crate::generic::UInt32 {
fn from(val: usize) -> Self {
crate::generic::UInt32 { val: val as u32 }
}
}
impl From<()> for crate::generic::Empty {
fn from(_val: ()) -> Self {
crate::generic::Empty {}
}
}

View file

@ -1,2 +1,51 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod generic_helpers;
/// Expose one generated protobuf package as a public module.
///
/// `$module` is the Rust module name to create; `$proto_name` is the proto
/// package suffix used to locate the `anki.<name>.rs` file that prost wrote
/// into `OUT_DIR` during the build.
macro_rules! protobuf {
    ($module:ident, $proto_name:literal) => {
        pub mod $module {
            include!(concat!(env!("OUT_DIR"), "/anki.", $proto_name, ".rs"));
        }
    };
}
use snafu::Snafu;
/// Errors raised by the generated service-dispatch glue.
///
/// The build script emits `run_method()` implementations that convert prost
/// encode/decode failures and unknown method indexes into this type; service
/// implementors map it into their own error via `From<ProtoError>`
/// (the trait's `type Error: From<crate::ProtoError>` bound).
#[derive(Debug, Snafu)]
pub enum ProtoError {
    // run_method() was invoked with an index outside the service's table.
    InvalidMethodIndex,
    // context(false) lets prost errors convert via `?` without a context selector.
    #[snafu(context(false))]
    DecodeError {
        source: prost::DecodeError,
    },
    #[snafu(context(false))]
    EncodeError {
        source: prost::EncodeError,
    },
}
// Service/method index table, generated into OUT_DIR by the build script.
include!(concat!(env!("OUT_DIR"), "/service_index.rs"));

// One public module per .proto package, kept in alphabetical order.
protobuf!(ankidroid, "ankidroid");
protobuf!(backend, "backend");
protobuf!(card_rendering, "card_rendering");
protobuf!(cards, "cards");
protobuf!(collection, "collection");
protobuf!(config, "config");
protobuf!(deckconfig, "deckconfig");
protobuf!(decks, "decks");
protobuf!(generic, "generic");
protobuf!(i18n, "i18n");
protobuf!(image_occlusion, "image_occlusion");
protobuf!(import_export, "import_export");
protobuf!(links, "links");
protobuf!(media, "media");
protobuf!(notes, "notes");
protobuf!(notetypes, "notetypes");
protobuf!(scheduler, "scheduler");
protobuf!(search, "search");
protobuf!(stats, "stats");
protobuf!(sync, "sync");
protobuf!(tags, "tags");

View file

@ -1,8 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::notes::DeckAndNotetype as DeckAndNotetypeProto;
use crate::adding::DeckAndNotetype;
use crate::pb::notes::DeckAndNotetype as DeckAndNotetypeProto;
impl From<DeckAndNotetype> for DeckAndNotetypeProto {
fn from(s: DeckAndNotetype) -> Self {

View file

@ -7,21 +7,21 @@ use std::sync::atomic::AtomicI32;
use std::sync::atomic::Ordering;
use std::sync::Mutex;
use anki_proto::ankidroid::sql_value::Data;
use anki_proto::ankidroid::DbResponse;
use anki_proto::ankidroid::DbResult;
use anki_proto::ankidroid::Row;
use anki_proto::ankidroid::SqlValue;
use itertools::FoldWhile;
use itertools::FoldWhile::Continue;
use itertools::FoldWhile::Done;
use itertools::Itertools;
use lazy_static::lazy_static;
use rusqlite::ToSql;
use serde_derive::Deserialize;
use serde::Deserialize;
use crate::collection::Collection;
use crate::error::Result;
use crate::pb::ankidroid::sql_value::Data;
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::DbResult;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue;
/// A pointer to the SqliteStorage object stored in a collection, used to
/// uniquely index results from multiple open collections at once.
@ -279,12 +279,13 @@ pub(crate) fn execute_for_row_count(col: &Collection, req: &[u8]) -> Result<i64>
#[cfg(test)]
mod tests {
use anki_proto::ankidroid::sql_value;
use anki_proto::ankidroid::Row;
use anki_proto::ankidroid::SqlValue;
use super::*;
use crate::backend::ankidroid::db::select_slice_of_size;
use crate::backend::ankidroid::db::Sizable;
use crate::pb::ankidroid::sql_value;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue;
fn gen_data() -> Vec<SqlValue> {
vec![

View file

@ -4,6 +4,12 @@
pub(crate) mod db;
pub(crate) mod error;
pub(super) use anki_proto::ankidroid::ankidroid_service::Service as AnkidroidService;
use anki_proto::ankidroid::DbResponse;
use anki_proto::ankidroid::GetActiveSequenceNumbersResponse;
use anki_proto::ankidroid::GetNextResultPageRequest;
use anki_proto::generic;
use self::db::active_sequences;
use self::error::debug_produce_error;
use super::dbproxy::db_command_bytes;
@ -11,24 +17,17 @@ use super::dbproxy::db_command_proto;
use super::Backend;
use crate::backend::ankidroid::db::execute_for_row_count;
use crate::backend::ankidroid::db::insert_for_id;
use crate::pb;
pub(super) use crate::pb::ankidroid::ankidroid_service::Service as AnkidroidService;
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::GetActiveSequenceNumbersResponse;
use crate::pb::ankidroid::GetNextResultPageRequest;
use crate::pb::generic;
use crate::pb::generic::Empty;
use crate::pb::generic::Int32;
use crate::pb::generic::Json;
use crate::prelude::*;
use crate::scheduler::timing;
use crate::scheduler::timing::fixed_offset_from_minutes;
impl AnkidroidService for Backend {
type Error = AnkiError;
fn sched_timing_today_legacy(
&self,
input: pb::ankidroid::SchedTimingTodayLegacyRequest,
) -> Result<pb::scheduler::SchedTimingTodayResponse> {
input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest,
) -> Result<anki_proto::scheduler::SchedTimingTodayResponse> {
let result = timing::sched_timing_today(
TimestampSecs::from(input.created_secs),
TimestampSecs::from(input.now_secs),
@ -36,40 +35,42 @@ impl AnkidroidService for Backend {
fixed_offset_from_minutes(input.now_mins_west),
Some(input.rollover_hour as u8),
)?;
Ok(pb::scheduler::SchedTimingTodayResponse::from(result))
Ok(anki_proto::scheduler::SchedTimingTodayResponse::from(
result,
))
}
fn local_minutes_west_legacy(&self, input: pb::generic::Int64) -> Result<pb::generic::Int32> {
Ok(pb::generic::Int32 {
fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result<generic::Int32> {
Ok(generic::Int32 {
val: timing::local_minutes_west_for_stamp(input.val.into())?,
})
}
fn run_db_command(&self, input: Json) -> Result<Json> {
fn run_db_command(&self, input: generic::Json) -> Result<generic::Json> {
self.with_col(|col| db_command_bytes(col, &input.json))
.map(|json| Json { json })
.map(|json| generic::Json { json })
}
fn run_db_command_proto(&self, input: Json) -> Result<DbResponse> {
fn run_db_command_proto(&self, input: generic::Json) -> Result<DbResponse> {
self.with_col(|col| db_command_proto(col, &input.json))
}
fn run_db_command_for_row_count(&self, input: Json) -> Result<pb::generic::Int64> {
fn run_db_command_for_row_count(&self, input: generic::Json) -> Result<generic::Int64> {
self.with_col(|col| execute_for_row_count(col, &input.json))
.map(|val| pb::generic::Int64 { val })
.map(|val| generic::Int64 { val })
}
fn flush_all_queries(&self, _input: Empty) -> Result<Empty> {
fn flush_all_queries(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| {
db::flush_collection(col);
Ok(Empty {})
Ok(generic::Empty {})
})
}
fn flush_query(&self, input: Int32) -> Result<Empty> {
fn flush_query(&self, input: generic::Int32) -> Result<generic::Empty> {
self.with_col(|col| {
db::flush_single_result(col, input.val);
Ok(Empty {})
Ok(generic::Empty {})
})
}
@ -79,11 +80,11 @@ impl AnkidroidService for Backend {
})
}
fn insert_for_id(&self, input: Json) -> Result<pb::generic::Int64> {
fn insert_for_id(&self, input: generic::Json) -> Result<generic::Int64> {
self.with_col(|col| insert_for_id(col, &input.json).map(Into::into))
}
fn set_page_size(&self, input: pb::generic::Int64) -> Result<Empty> {
fn set_page_size(&self, input: generic::Int64) -> Result<generic::Empty> {
// we don't require an open collection, but should avoid modifying this
// concurrently
let _guard = self.col.lock();
@ -91,10 +92,7 @@ impl AnkidroidService for Backend {
Ok(().into())
}
fn get_column_names_from_query(
&self,
input: generic::String,
) -> Result<pb::generic::StringList> {
fn get_column_names_from_query(&self, input: generic::String) -> Result<generic::StringList> {
self.with_col(|col| {
let stmt = col.storage.db.prepare(&input.val)?;
let names = stmt.column_names();
@ -105,7 +103,7 @@ impl AnkidroidService for Backend {
fn get_active_sequence_numbers(
&self,
_input: Empty,
_input: generic::Empty,
) -> Result<GetActiveSequenceNumbersResponse> {
self.with_col(|col| {
Ok(GetActiveSequenceNumbersResponse {
@ -114,7 +112,20 @@ impl AnkidroidService for Backend {
})
}
fn debug_produce_error(&self, input: generic::String) -> Result<Empty> {
fn debug_produce_error(&self, input: generic::String) -> Result<generic::Empty> {
Err(debug_produce_error(&input.val))
}
}
/// Convert the internal scheduler timing result into its protobuf response,
/// unwrapping the timestamp newtype for the wire format.
impl From<crate::scheduler::timing::SchedTimingToday>
    for anki_proto::scheduler::SchedTimingTodayResponse
{
    fn from(t: crate::scheduler::timing::SchedTimingToday) -> Self {
        Self {
            days_elapsed: t.days_elapsed,
            next_day_at: t.next_day_at.0,
        }
    }
}

View file

@ -1,15 +1,18 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::cards::cards_service::Service as CardsService;
use anki_proto::generic;
use super::Backend;
use crate::card::CardQueue;
use crate::card::CardType;
use crate::pb;
pub(super) use crate::pb::cards::cards_service::Service as CardsService;
use crate::prelude::*;
impl CardsService for Backend {
fn get_card(&self, input: pb::cards::CardId) -> Result<pb::cards::Card> {
type Error = AnkiError;
fn get_card(&self, input: anki_proto::cards::CardId) -> Result<anki_proto::cards::Card> {
let cid = input.into();
self.with_col(|col| {
col.storage
@ -21,8 +24,8 @@ impl CardsService for Backend {
fn update_cards(
&self,
input: pb::cards::UpdateCardsRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::cards::UpdateCardsRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let cards = input
.cards
@ -37,7 +40,7 @@ impl CardsService for Backend {
.map(Into::into)
}
fn remove_cards(&self, input: pb::cards::RemoveCardsRequest) -> Result<pb::generic::Empty> {
fn remove_cards(&self, input: anki_proto::cards::RemoveCardsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes(
@ -54,8 +57,8 @@ impl CardsService for Backend {
fn set_deck(
&self,
input: pb::cards::SetDeckRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::cards::SetDeckRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect();
let deck_id = input.deck_id.into();
self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
@ -63,8 +66,8 @@ impl CardsService for Backend {
fn set_flag(
&self,
input: pb::cards::SetFlagRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::cards::SetFlagRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.set_card_flag(&to_card_ids(input.card_ids), input.flag)
.map(Into::into)
@ -72,10 +75,10 @@ impl CardsService for Backend {
}
}
impl TryFrom<pb::cards::Card> for Card {
impl TryFrom<anki_proto::cards::Card> for Card {
type Error = AnkiError;
fn try_from(c: pb::cards::Card) -> Result<Self, Self::Error> {
fn try_from(c: anki_proto::cards::Card) -> Result<Self, Self::Error> {
let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?;
let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?;
Ok(Card {
@ -102,9 +105,9 @@ impl TryFrom<pb::cards::Card> for Card {
}
}
impl From<Card> for pb::cards::Card {
impl From<Card> for anki_proto::cards::Card {
fn from(c: Card) -> Self {
pb::cards::Card {
anki_proto::cards::Card {
id: c.id.0,
note_id: c.note_id.0,
deck_id: c.deck_id.0,
@ -131,3 +134,9 @@ impl From<Card> for pb::cards::Card {
fn to_card_ids(v: Vec<i64>) -> Vec<CardId> {
v.into_iter().map(CardId).collect()
}
impl From<anki_proto::cards::CardId> for CardId {
fn from(cid: anki_proto::cards::CardId) -> Self {
CardId(cid.cid)
}
}

View file

@ -1,6 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::card_rendering::cardrendering_service::Service as CardRenderingService;
use anki_proto::card_rendering::ExtractClozeForTypingRequest;
use anki_proto::generic;
use super::Backend;
use crate::card_rendering::extract_av_tags;
use crate::card_rendering::strip_av_tags;
@ -12,9 +16,6 @@ use crate::latex::ExtractedLatex;
use crate::markdown::render_markdown;
use crate::notetype::CardTemplateSchema11;
use crate::notetype::RenderCardOutput;
use crate::pb;
pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService;
use crate::pb::card_rendering::ExtractClozeForTypingRequest;
use crate::prelude::*;
use crate::template::RenderedNode;
use crate::text::decode_iri_paths;
@ -25,12 +26,14 @@ use crate::text::strip_html_preserving_media_filenames;
use crate::typeanswer::compare_answer;
impl CardRenderingService for Backend {
type Error = AnkiError;
fn extract_av_tags(
&self,
input: pb::card_rendering::ExtractAvTagsRequest,
) -> Result<pb::card_rendering::ExtractAvTagsResponse> {
input: anki_proto::card_rendering::ExtractAvTagsRequest,
) -> Result<anki_proto::card_rendering::ExtractAvTagsResponse> {
let out = extract_av_tags(input.text, input.question_side, self.i18n());
Ok(pb::card_rendering::ExtractAvTagsResponse {
Ok(anki_proto::card_rendering::ExtractAvTagsResponse {
text: out.0,
av_tags: out.1,
})
@ -38,8 +41,8 @@ impl CardRenderingService for Backend {
fn extract_latex(
&self,
input: pb::card_rendering::ExtractLatexRequest,
) -> Result<pb::card_rendering::ExtractLatexResponse> {
input: anki_proto::card_rendering::ExtractLatexRequest,
) -> Result<anki_proto::card_rendering::ExtractLatexResponse> {
let func = if input.expand_clozes {
extract_latex_expanding_clozes
} else {
@ -47,22 +50,24 @@ impl CardRenderingService for Backend {
};
let (text, extracted) = func(&input.text, input.svg);
Ok(pb::card_rendering::ExtractLatexResponse {
Ok(anki_proto::card_rendering::ExtractLatexResponse {
text,
latex: extracted
.into_iter()
.map(|e: ExtractedLatex| pb::card_rendering::ExtractedLatex {
.map(
|e: ExtractedLatex| anki_proto::card_rendering::ExtractedLatex {
filename: e.fname,
latex_body: e.latex,
})
},
)
.collect(),
})
}
fn get_empty_cards(
&self,
_input: pb::generic::Empty,
) -> Result<pb::card_rendering::EmptyCardsReport> {
_input: generic::Empty,
) -> Result<anki_proto::card_rendering::EmptyCardsReport> {
self.with_col(|col| {
let mut empty = col.empty_cards()?;
let report = col.empty_cards_report(&mut empty)?;
@ -70,14 +75,14 @@ impl CardRenderingService for Backend {
let mut outnotes = vec![];
for (_ntid, notes) in empty {
outnotes.extend(notes.into_iter().map(|e| {
pb::card_rendering::empty_cards_report::NoteWithEmptyCards {
anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards {
note_id: e.nid.0,
will_delete_note: e.empty.len() == e.current_count,
card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(),
}
}))
}
Ok(pb::card_rendering::EmptyCardsReport {
Ok(anki_proto::card_rendering::EmptyCardsReport {
report,
notes: outnotes,
})
@ -86,8 +91,8 @@ impl CardRenderingService for Backend {
fn render_existing_card(
&self,
input: pb::card_rendering::RenderExistingCardRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
input: anki_proto::card_rendering::RenderExistingCardRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
self.with_col(|col| {
col.render_existing_card(CardId(input.card_id), input.browser)
.map(Into::into)
@ -96,8 +101,8 @@ impl CardRenderingService for Backend {
fn render_uncommitted_card(
&self,
input: pb::card_rendering::RenderUncommittedCardRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
input: anki_proto::card_rendering::RenderUncommittedCardRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
let template = input.template.or_invalid("missing template")?.into();
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
@ -110,8 +115,8 @@ impl CardRenderingService for Backend {
fn render_uncommitted_card_legacy(
&self,
input: pb::card_rendering::RenderUncommittedCardLegacyRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
input: anki_proto::card_rendering::RenderUncommittedCardLegacyRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
let template = schema11.into();
let mut note = input.note.or_invalid("missing note")?.into();
@ -123,14 +128,14 @@ impl CardRenderingService for Backend {
})
}
fn strip_av_tags(&self, input: pb::generic::String) -> Result<pb::generic::String> {
fn strip_av_tags(&self, input: generic::String) -> Result<generic::String> {
Ok(strip_av_tags(input.val).into())
}
fn render_markdown(
&self,
input: pb::card_rendering::RenderMarkdownRequest,
) -> Result<pb::generic::String> {
input: anki_proto::card_rendering::RenderMarkdownRequest,
) -> Result<generic::String> {
let mut text = render_markdown(&input.markdown);
if input.sanitize {
// currently no images
@ -139,21 +144,21 @@ impl CardRenderingService for Backend {
Ok(text.into())
}
fn encode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
fn encode_iri_paths(&self, input: generic::String) -> Result<generic::String> {
Ok(encode_iri_paths(&input.val).to_string().into())
}
fn decode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
fn decode_iri_paths(&self, input: generic::String) -> Result<generic::String> {
Ok(decode_iri_paths(&input.val).to_string().into())
}
fn strip_html(
&self,
input: pb::card_rendering::StripHtmlRequest,
) -> Result<pb::generic::String> {
input: anki_proto::card_rendering::StripHtmlRequest,
) -> Result<generic::String> {
Ok(match input.mode() {
pb::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text),
pb::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => {
anki_proto::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text),
anki_proto::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => {
strip_html_preserving_media_filenames(&input.text)
}
}
@ -163,15 +168,15 @@ impl CardRenderingService for Backend {
fn compare_answer(
&self,
input: pb::card_rendering::CompareAnswerRequest,
) -> Result<pb::generic::String> {
input: anki_proto::card_rendering::CompareAnswerRequest,
) -> Result<generic::String> {
Ok(compare_answer(&input.expected, &input.provided).into())
}
fn extract_cloze_for_typing(
&self,
input: ExtractClozeForTypingRequest,
) -> Result<pb::generic::String> {
) -> Result<generic::String> {
Ok(extract_cloze_for_typing(&input.text, input.ordinal as u16)
.to_string()
.into())
@ -179,16 +184,16 @@ impl CardRenderingService for Backend {
fn all_tts_voices(
&self,
input: pb::card_rendering::AllTtsVoicesRequest,
) -> Result<pb::card_rendering::AllTtsVoicesResponse> {
input: anki_proto::card_rendering::AllTtsVoicesRequest,
) -> Result<anki_proto::card_rendering::AllTtsVoicesResponse> {
tts::all_voices(input.validate)
.map(|voices| pb::card_rendering::AllTtsVoicesResponse { voices })
.map(|voices| anki_proto::card_rendering::AllTtsVoicesResponse { voices })
}
fn write_tts_stream(
&self,
request: pb::card_rendering::WriteTtsStreamRequest,
) -> Result<pb::generic::Empty> {
request: anki_proto::card_rendering::WriteTtsStreamRequest,
) -> Result<generic::Empty> {
tts::write_stream(
&request.path,
&request.voice_id,
@ -201,26 +206,28 @@ impl CardRenderingService for Backend {
fn rendered_nodes_to_proto(
nodes: Vec<RenderedNode>,
) -> Vec<pb::card_rendering::RenderedTemplateNode> {
) -> Vec<anki_proto::card_rendering::RenderedTemplateNode> {
nodes
.into_iter()
.map(|n| pb::card_rendering::RenderedTemplateNode {
.map(|n| anki_proto::card_rendering::RenderedTemplateNode {
value: Some(rendered_node_to_proto(n)),
})
.collect()
}
fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_template_node::Value {
fn rendered_node_to_proto(
node: RenderedNode,
) -> anki_proto::card_rendering::rendered_template_node::Value {
match node {
RenderedNode::Text { text } => {
pb::card_rendering::rendered_template_node::Value::Text(text)
anki_proto::card_rendering::rendered_template_node::Value::Text(text)
}
RenderedNode::Replacement {
field_name,
current_text,
filters,
} => pb::card_rendering::rendered_template_node::Value::Replacement(
pb::card_rendering::RenderedTemplateReplacement {
} => anki_proto::card_rendering::rendered_template_node::Value::Replacement(
anki_proto::card_rendering::RenderedTemplateReplacement {
field_name,
current_text,
filters,
@ -229,9 +236,9 @@ fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_te
}
}
impl From<RenderCardOutput> for pb::card_rendering::RenderCardResponse {
impl From<RenderCardOutput> for anki_proto::card_rendering::RenderCardResponse {
fn from(o: RenderCardOutput) -> Self {
pb::card_rendering::RenderCardResponse {
anki_proto::card_rendering::RenderCardResponse {
question_nodes: rendered_nodes_to_proto(o.qnodes),
answer_nodes: rendered_nodes_to_proto(o.anodes),
css: o.css,

View file

@ -3,32 +3,34 @@
use std::sync::MutexGuard;
pub(super) use anki_proto::collection::collection_service::Service as CollectionService;
use anki_proto::generic;
use tracing::error;
use super::progress::Progress;
use super::Backend;
use crate::backend::progress::progress_to_proto;
use crate::collection::CollectionBuilder;
use crate::pb;
pub(super) use crate::pb::collection::collection_service::Service as CollectionService;
use crate::prelude::*;
use crate::storage::SchemaVersion;
impl CollectionService for Backend {
fn latest_progress(&self, _input: pb::generic::Empty) -> Result<pb::collection::Progress> {
type Error = AnkiError;
fn latest_progress(&self, _input: generic::Empty) -> Result<anki_proto::collection::Progress> {
let progress = self.progress_state.lock().unwrap().last_progress;
Ok(progress_to_proto(progress, &self.tr))
}
fn set_wants_abort(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn set_wants_abort(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.progress_state.lock().unwrap().want_abort = true;
Ok(().into())
}
fn open_collection(
&self,
input: pb::collection::OpenCollectionRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::collection::OpenCollectionRequest,
) -> Result<generic::Empty> {
let mut guard = self.lock_closed_collection()?;
let mut builder = CollectionBuilder::new(input.collection_path);
@ -45,8 +47,8 @@ impl CollectionService for Backend {
fn close_collection(
&self,
input: pb::collection::CloseCollectionRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::collection::CloseCollectionRequest,
) -> Result<generic::Empty> {
let desired_version = if input.downgrade_to_schema11 {
Some(SchemaVersion::V11)
} else {
@ -66,37 +68,44 @@ impl CollectionService for Backend {
fn check_database(
&self,
_input: pb::generic::Empty,
) -> Result<pb::collection::CheckDatabaseResponse> {
_input: generic::Empty,
) -> Result<anki_proto::collection::CheckDatabaseResponse> {
let mut handler = self.new_progress_handler();
let progress_fn = move |progress, throttle| {
handler.update(Progress::DatabaseCheck(progress), throttle);
};
self.with_col(|col| {
col.check_database(progress_fn)
.map(|problems| pb::collection::CheckDatabaseResponse {
col.check_database(progress_fn).map(|problems| {
anki_proto::collection::CheckDatabaseResponse {
problems: problems.to_i18n_strings(&col.tr),
}
})
})
}
fn get_undo_status(&self, _input: pb::generic::Empty) -> Result<pb::collection::UndoStatus> {
fn get_undo_status(
&self,
_input: generic::Empty,
) -> Result<anki_proto::collection::UndoStatus> {
self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr)))
}
fn undo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
fn undo(&self, _input: generic::Empty) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr)))
}
fn redo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
fn redo(&self, _input: generic::Empty) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr)))
}
fn add_custom_undo_entry(&self, input: pb::generic::String) -> Result<pb::generic::UInt32> {
fn add_custom_undo_entry(&self, input: generic::String) -> Result<generic::UInt32> {
self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into()))
}
fn merge_undo_entries(&self, input: pb::generic::UInt32) -> Result<pb::collection::OpChanges> {
fn merge_undo_entries(
&self,
input: generic::UInt32,
) -> Result<anki_proto::collection::OpChanges> {
let starting_from = input.val as usize;
self.with_col(|col| col.merge_undoable_ops(starting_from))
.map(Into::into)
@ -104,8 +113,8 @@ impl CollectionService for Backend {
fn create_backup(
&self,
input: pb::collection::CreateBackupRequest,
) -> Result<pb::generic::Bool> {
input: anki_proto::collection::CreateBackupRequest,
) -> Result<generic::Bool> {
// lock collection
let mut col_lock = self.lock_open_collection()?;
let col = col_lock.as_mut().unwrap();
@ -129,7 +138,7 @@ impl CollectionService for Backend {
Ok(created.into())
}
fn await_backup_completion(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn await_backup_completion(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.await_backup_completion()?;
Ok(().into())
}

View file

@ -1,15 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::config::config_key::Bool as BoolKeyProto;
use anki_proto::config::config_key::String as StringKeyProto;
pub(super) use anki_proto::config::config_service::Service as ConfigService;
use anki_proto::generic;
use serde_json::Value;
use super::Backend;
use crate::config::BoolKey;
use crate::config::StringKey;
use crate::pb;
use crate::pb::config::config_key::Bool as BoolKeyProto;
use crate::pb::config::config_key::String as StringKeyProto;
pub(super) use crate::pb::config::config_service::Service as ConfigService;
use crate::prelude::*;
impl From<BoolKeyProto> for BoolKey {
@ -54,7 +54,9 @@ impl From<StringKeyProto> for StringKey {
}
impl ConfigService for Backend {
fn get_config_json(&self, input: pb::generic::String) -> Result<pb::generic::Json> {
type Error = AnkiError;
fn get_config_json(&self, input: generic::String) -> Result<generic::Json> {
self.with_col(|col| {
let val: Option<Value> = col.get_config_optional(input.val.as_str());
val.or_not_found(input.val)
@ -65,8 +67,8 @@ impl ConfigService for Backend {
fn set_config_json(
&self,
input: pb::config::SetConfigJsonRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.set_config_json(input.key.as_str(), &val, input.undoable)
@ -76,8 +78,8 @@ impl ConfigService for Backend {
fn set_config_json_no_undo(
&self,
input: pb::config::SetConfigJsonRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<generic::Empty> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
@ -85,12 +87,12 @@ impl ConfigService for Backend {
.map(Into::into)
}
fn remove_config(&self, input: pb::generic::String) -> Result<pb::collection::OpChanges> {
fn remove_config(&self, input: generic::String) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.remove_config(input.val.as_str()))
.map(Into::into)
}
fn get_all_config(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
fn get_all_config(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let conf = col.storage.get_all_config()?;
serde_json::to_vec(&conf).map_err(Into::into)
@ -100,10 +102,10 @@ impl ConfigService for Backend {
fn get_config_bool(
&self,
input: pb::config::GetConfigBoolRequest,
) -> Result<pb::generic::Bool> {
input: anki_proto::config::GetConfigBoolRequest,
) -> Result<generic::Bool> {
self.with_col(|col| {
Ok(pb::generic::Bool {
Ok(generic::Bool {
val: col.get_config_bool(input.key().into()),
})
})
@ -111,18 +113,18 @@ impl ConfigService for Backend {
fn set_config_bool(
&self,
input: pb::config::SetConfigBoolRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::config::SetConfigBoolRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable))
.map(Into::into)
}
fn get_config_string(
&self,
input: pb::config::GetConfigStringRequest,
) -> Result<pb::generic::String> {
input: anki_proto::config::GetConfigStringRequest,
) -> Result<generic::String> {
self.with_col(|col| {
Ok(pb::generic::String {
Ok(generic::String {
val: col.get_config_string(input.key().into()),
})
})
@ -130,17 +132,20 @@ impl ConfigService for Backend {
fn set_config_string(
&self,
input: pb::config::SetConfigStringRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::config::SetConfigStringRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable))
.map(Into::into)
}
fn get_preferences(&self, _input: pb::generic::Empty) -> Result<pb::config::Preferences> {
fn get_preferences(&self, _input: generic::Empty) -> Result<anki_proto::config::Preferences> {
self.with_col(|col| col.get_preferences())
}
fn set_preferences(&self, input: pb::config::Preferences) -> Result<pb::collection::OpChanges> {
fn set_preferences(
&self,
input: anki_proto::config::Preferences,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_preferences(input))
.map(Into::into)
}

View file

@ -1,6 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::ankidroid::sql_value::Data;
use anki_proto::ankidroid::DbResponse;
use anki_proto::ankidroid::DbResult as ProtoDbResult;
use anki_proto::ankidroid::SqlValue as pb_SqlValue;
use rusqlite::params_from_iter;
use rusqlite::types::FromSql;
use rusqlite::types::FromSqlError;
@ -8,15 +12,9 @@ use rusqlite::types::ToSql;
use rusqlite::types::ToSqlOutput;
use rusqlite::types::ValueRef;
use rusqlite::OptionalExtension;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde::Deserialize;
use serde::Serialize;
use crate::pb;
use crate::pb::ankidroid::sql_value::Data;
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::DbResult as ProtoDbResult;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue as pb_SqlValue;
use crate::prelude::*;
use crate::storage::SqliteStorage;
@ -67,7 +65,7 @@ impl ToSql for SqlValue {
}
}
impl From<&SqlValue> for pb::ankidroid::SqlValue {
impl From<&SqlValue> for anki_proto::ankidroid::SqlValue {
fn from(item: &SqlValue) -> Self {
match item {
SqlValue::Null => pb_SqlValue { data: Option::None },
@ -87,19 +85,18 @@ impl From<&SqlValue> for pb::ankidroid::SqlValue {
}
}
impl From<&Vec<SqlValue>> for pb::ankidroid::Row {
fn from(item: &Vec<SqlValue>) -> Self {
Row {
fields: item.iter().map(pb::ankidroid::SqlValue::from).collect(),
}
fn row_to_proto(row: &[SqlValue]) -> anki_proto::ankidroid::Row {
anki_proto::ankidroid::Row {
fields: row
.iter()
.map(anki_proto::ankidroid::SqlValue::from)
.collect(),
}
}
impl From<&Vec<Vec<SqlValue>>> for pb::ankidroid::DbResult {
fn from(item: &Vec<Vec<SqlValue>>) -> Self {
ProtoDbResult {
rows: item.iter().map(Row::from).collect(),
}
fn rows_to_proto(rows: &[Vec<SqlValue>]) -> anki_proto::ankidroid::DbResult {
anki_proto::ankidroid::DbResult {
rows: rows.iter().map(|r| row_to_proto(r)).collect(),
}
}
@ -182,7 +179,7 @@ pub(crate) fn db_command_proto(col: &mut Collection, input: &[u8]) -> Result<DbR
let result = db_command_bytes_inner(col, input)?;
let proto_resp = match result {
DbResult::None => ProtoDbResult { rows: Vec::new() },
DbResult::Rows(rows) => ProtoDbResult::from(&rows),
DbResult::Rows(rows) => rows_to_proto(&rows),
};
let trimmed = super::ankidroid::db::trim_and_cache_remaining(
col,

View file

@ -1,31 +1,34 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::deckconfig::deckconfig_service::Service as DeckConfigService;
use anki_proto::generic;
use super::Backend;
use crate::deckconfig::DeckConfSchema11;
use crate::deckconfig::DeckConfig;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::pb;
pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService;
use crate::prelude::*;
impl DeckConfigService for Backend {
type Error = AnkiError;
fn add_or_update_deck_config_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::deckconfig::DeckConfigId> {
input: generic::Json,
) -> Result<anki_proto::deckconfig::DeckConfigId> {
let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?;
let mut conf: DeckConfig = conf.into();
self.with_col(|col| {
col.transact_no_undo(|col| {
col.add_or_update_deck_config_legacy(&mut conf)?;
Ok(pb::deckconfig::DeckConfigId { dcid: conf.id.0 })
Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 })
})
})
.map(Into::into)
}
fn all_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
fn all_deck_config_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let conf: Vec<DeckConfSchema11> = col
.storage
@ -40,15 +43,15 @@ impl DeckConfigService for Backend {
fn get_deck_config(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::deckconfig::DeckConfig> {
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<anki_proto::deckconfig::DeckConfig> {
self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into()))
}
fn get_deck_config_legacy(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::generic::Json> {
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<generic::Json> {
self.with_col(|col| {
let conf = col.get_deck_config(input.into(), true)?.unwrap();
let conf: DeckConfSchema11 = conf.into();
@ -57,7 +60,7 @@ impl DeckConfigService for Backend {
.map(Into::into)
}
fn new_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
fn new_deck_config_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
serde_json::to_vec(&DeckConfSchema11::default())
.map_err(Into::into)
.map(Into::into)
@ -65,31 +68,31 @@ impl DeckConfigService for Backend {
fn remove_deck_config(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::generic::Empty> {
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into())))
.map(Into::into)
}
fn get_deck_configs_for_update(
&self,
input: pb::decks::DeckId,
) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
self.with_col(|col| col.get_deck_configs_for_update(input.into()))
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::deckconfig::DeckConfigsForUpdate> {
self.with_col(|col| col.get_deck_configs_for_update(input.did.into()))
}
fn update_deck_configs(
&self,
input: pb::deckconfig::UpdateDeckConfigsRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::deckconfig::UpdateDeckConfigsRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.update_deck_configs(input.into()))
.map(Into::into)
}
}
impl From<DeckConfig> for pb::deckconfig::DeckConfig {
impl From<DeckConfig> for anki_proto::deckconfig::DeckConfig {
fn from(c: DeckConfig) -> Self {
pb::deckconfig::DeckConfig {
anki_proto::deckconfig::DeckConfig {
id: c.id.0,
name: c.name,
mtime_secs: c.mtime_secs.0,
@ -99,8 +102,8 @@ impl From<DeckConfig> for pb::deckconfig::DeckConfig {
}
}
impl From<pb::deckconfig::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
fn from(c: pb::deckconfig::UpdateDeckConfigsRequest) -> Self {
impl From<anki_proto::deckconfig::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
fn from(c: anki_proto::deckconfig::UpdateDeckConfigsRequest) -> Self {
UpdateDeckConfigsRequest {
target_deck_id: c.target_deck_id.into(),
configs: c.configs.into_iter().map(Into::into).collect(),
@ -113,8 +116,8 @@ impl From<pb::deckconfig::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest
}
}
impl From<pb::deckconfig::DeckConfig> for DeckConfig {
fn from(c: pb::deckconfig::DeckConfig) -> Self {
impl From<anki_proto::deckconfig::DeckConfig> for DeckConfig {
fn from(c: anki_proto::deckconfig::DeckConfig) -> Self {
DeckConfig {
id: c.id.into(),
name: c.name,
@ -124,3 +127,9 @@ impl From<pb::deckconfig::DeckConfig> for DeckConfig {
}
}
}
impl From<anki_proto::deckconfig::DeckConfigId> for DeckConfigId {
fn from(dcid: anki_proto::deckconfig::DeckConfigId) -> Self {
DeckConfigId(dcid.dcid)
}
}

View file

@ -3,25 +3,34 @@
use std::convert::TryFrom;
pub(super) use anki_proto::decks::decks_service::Service as DecksService;
use anki_proto::generic;
use super::Backend;
use crate::decks::filtered::search_order_labels;
use crate::decks::DeckSchema11;
use crate::decks::FilteredSearchOrder;
use crate::pb;
pub(super) use crate::pb::decks::decks_service::Service as DecksService;
use crate::prelude::*;
use crate::scheduler::filtered::FilteredDeckForUpdate;
impl DecksService for Backend {
fn new_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
type Error = AnkiError;
fn new_deck(&self, _input: generic::Empty) -> Result<anki_proto::decks::Deck> {
Ok(Deck::new_normal().into())
}
fn add_deck(&self, deck: pb::decks::Deck) -> Result<pb::collection::OpChangesWithId> {
fn add_deck(
&self,
deck: anki_proto::decks::Deck,
) -> Result<anki_proto::collection::OpChangesWithId> {
let mut deck: Deck = deck.try_into()?;
self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into()))
}
fn add_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChangesWithId> {
fn add_deck_legacy(
&self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChangesWithId> {
let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck: Deck = schema11.into();
self.with_col(|col| {
@ -32,8 +41,8 @@ impl DecksService for Backend {
fn add_or_update_deck_legacy(
&self,
input: pb::decks::AddOrUpdateDeckLegacyRequest,
) -> Result<pb::decks::DeckId> {
input: anki_proto::decks::AddOrUpdateDeckLegacyRequest,
) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
let mut deck: Deck = schema11.into();
@ -45,11 +54,14 @@ impl DecksService for Backend {
} else {
col.add_or_update_deck(&mut deck)?;
}
Ok(pb::decks::DeckId { did: deck.id.0 })
Ok(anki_proto::decks::DeckId { did: deck.id.0 })
})
}
fn deck_tree(&self, input: pb::decks::DeckTreeRequest) -> Result<pb::decks::DeckTreeNode> {
fn deck_tree(
&self,
input: anki_proto::decks::DeckTreeRequest,
) -> Result<anki_proto::decks::DeckTreeNode> {
self.with_col(|col| {
let now = if input.now == 0 {
None
@ -60,7 +72,7 @@ impl DecksService for Backend {
})
}
fn deck_tree_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
fn deck_tree_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let tree = col.legacy_deck_tree()?;
serde_json::to_vec(&tree)
@ -69,7 +81,7 @@ impl DecksService for Backend {
})
}
fn get_all_decks_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
fn get_all_decks_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let decks = col.storage.get_all_decks_as_schema11()?;
serde_json::to_vec(&decks).map_err(Into::into)
@ -77,28 +89,34 @@ impl DecksService for Backend {
.map(Into::into)
}
fn get_deck_id_by_name(&self, input: pb::generic::String) -> Result<pb::decks::DeckId> {
fn get_deck_id_by_name(&self, input: generic::String) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
col.get_deck_id(&input.val).and_then(|d| {
d.or_not_found(input.val)
.map(|d| pb::decks::DeckId { did: d.0 })
.map(|d| anki_proto::decks::DeckId { did: d.0 })
})
})
}
fn get_deck(&self, input: pb::decks::DeckId) -> Result<pb::decks::Deck> {
fn get_deck(&self, input: anki_proto::decks::DeckId) -> Result<anki_proto::decks::Deck> {
let did = input.into();
self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into()))
}
fn update_deck(&self, input: pb::decks::Deck) -> Result<pb::collection::OpChanges> {
fn update_deck(
&self,
input: anki_proto::decks::Deck,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let mut deck = Deck::try_from(input)?;
col.update_deck(&mut deck).map(Into::into)
})
}
fn update_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChanges> {
fn update_deck_legacy(
&self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let deck: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck = deck.into();
@ -106,7 +124,7 @@ impl DecksService for Backend {
})
}
fn get_deck_legacy(&self, input: pb::decks::DeckId) -> Result<pb::generic::Json> {
fn get_deck_legacy(&self, input: anki_proto::decks::DeckId) -> Result<generic::Json> {
let did = input.into();
self.with_col(|col| {
let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into();
@ -118,26 +136,29 @@ impl DecksService for Backend {
fn get_deck_names(
&self,
input: pb::decks::GetDeckNamesRequest,
) -> Result<pb::decks::DeckNames> {
input: anki_proto::decks::GetDeckNamesRequest,
) -> Result<anki_proto::decks::DeckNames> {
self.with_col(|col| {
let names = if input.include_filtered {
col.get_all_deck_names(input.skip_empty_default)?
} else {
col.get_all_normal_deck_names()?
};
Ok(names.into())
Ok(deck_names_to_proto(names))
})
}
fn get_deck_and_child_names(&self, input: pb::decks::DeckId) -> Result<pb::decks::DeckNames> {
fn get_deck_and_child_names(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::decks::DeckNames> {
self.with_col(|col| {
col.get_deck_and_child_names(input.did.into())
.map(Into::into)
.map(deck_names_to_proto)
})
}
fn new_deck_legacy(&self, input: pb::generic::Bool) -> Result<pb::generic::Json> {
fn new_deck_legacy(&self, input: generic::Bool) -> Result<generic::Json> {
let deck = if input.val {
Deck::new_filtered()
} else {
@ -151,16 +172,20 @@ impl DecksService for Backend {
fn remove_decks(
&self,
input: pb::decks::DeckIds,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.remove_decks_and_child_decks(&Into::<Vec<DeckId>>::into(input)))
input: anki_proto::decks::DeckIds,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.remove_decks_and_child_decks(
&input.dids.into_iter().map(DeckId).collect::<Vec<_>>(),
)
})
.map(Into::into)
}
fn reparent_decks(
&self,
input: pb::decks::ReparentDecksRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::decks::ReparentDecksRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
let new_parent = if input.new_parent == 0 {
None
@ -173,78 +198,72 @@ impl DecksService for Backend {
fn rename_deck(
&self,
input: pb::decks::RenameDeckRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::decks::RenameDeckRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name))
.map(Into::into)
}
fn get_or_create_filtered_deck(
&self,
input: pb::decks::DeckId,
) -> Result<pb::decks::FilteredDeckForUpdate> {
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::decks::FilteredDeckForUpdate> {
self.with_col(|col| col.get_or_create_filtered_deck(input.into()))
.map(Into::into)
}
fn add_or_update_filtered_deck(
&self,
input: pb::decks::FilteredDeckForUpdate,
) -> Result<pb::collection::OpChangesWithId> {
input: anki_proto::decks::FilteredDeckForUpdate,
) -> Result<anki_proto::collection::OpChangesWithId> {
self.with_col(|col| col.add_or_update_filtered_deck(input.into()))
.map(|out| out.map(i64::from))
.map(Into::into)
}
fn filtered_deck_order_labels(
&self,
_input: pb::generic::Empty,
) -> Result<pb::generic::StringList> {
Ok(FilteredSearchOrder::labels(&self.tr).into())
fn filtered_deck_order_labels(&self, _input: generic::Empty) -> Result<generic::StringList> {
Ok(search_order_labels(&self.tr).into())
}
fn set_deck_collapsed(
&self,
input: pb::decks::SetDeckCollapsedRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::decks::SetDeckCollapsedRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
})
.map(Into::into)
}
fn set_current_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
fn set_current_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_current_deck(input.did.into()))
.map(Into::into)
}
fn get_current_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
fn get_current_deck(&self, _input: generic::Empty) -> Result<anki_proto::decks::Deck> {
self.with_col(|col| col.get_current_deck())
.map(|deck| (*deck).clone().into())
}
}
impl From<pb::decks::DeckId> for DeckId {
fn from(did: pb::decks::DeckId) -> Self {
impl From<anki_proto::decks::DeckId> for DeckId {
fn from(did: anki_proto::decks::DeckId) -> Self {
DeckId(did.did)
}
}
impl From<pb::decks::DeckIds> for Vec<DeckId> {
fn from(dids: pb::decks::DeckIds) -> Self {
dids.dids.into_iter().map(DeckId).collect()
}
}
impl From<DeckId> for pb::decks::DeckId {
impl From<DeckId> for anki_proto::decks::DeckId {
fn from(did: DeckId) -> Self {
pb::decks::DeckId { did: did.0 }
anki_proto::decks::DeckId { did: did.0 }
}
}
impl From<FilteredDeckForUpdate> for pb::decks::FilteredDeckForUpdate {
impl From<FilteredDeckForUpdate> for anki_proto::decks::FilteredDeckForUpdate {
fn from(deck: FilteredDeckForUpdate) -> Self {
pb::decks::FilteredDeckForUpdate {
anki_proto::decks::FilteredDeckForUpdate {
id: deck.id.into(),
name: deck.human_name,
config: Some(deck.config),
@ -252,8 +271,8 @@ impl From<FilteredDeckForUpdate> for pb::decks::FilteredDeckForUpdate {
}
}
impl From<pb::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
fn from(deck: pb::decks::FilteredDeckForUpdate) -> Self {
impl From<anki_proto::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self {
FilteredDeckForUpdate {
id: deck.id.into(),
human_name: deck.name,
@ -262,74 +281,54 @@ impl From<pb::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
}
}
impl From<Deck> for pb::decks::Deck {
impl From<Deck> for anki_proto::decks::Deck {
fn from(d: Deck) -> Self {
pb::decks::Deck {
anki_proto::decks::Deck {
id: d.id.0,
name: d.name.human_name(),
mtime_secs: d.mtime_secs.0,
usn: d.usn.0,
common: Some(d.common),
kind: Some(d.kind.into()),
kind: Some(kind_from_inline(d.kind)),
}
}
}
impl TryFrom<pb::decks::Deck> for Deck {
impl TryFrom<anki_proto::decks::Deck> for Deck {
type Error = AnkiError;
fn try_from(d: pb::decks::Deck) -> Result<Self, Self::Error> {
fn try_from(d: anki_proto::decks::Deck) -> Result<Self, Self::Error> {
Ok(Deck {
id: DeckId(d.id),
name: NativeDeckName::from_human_name(&d.name),
mtime_secs: TimestampSecs(d.mtime_secs),
usn: Usn(d.usn),
common: d.common.unwrap_or_default(),
kind: d.kind.or_invalid("missing kind")?.into(),
kind: kind_to_inline(d.kind.or_invalid("missing kind")?),
})
}
}
impl From<DeckKind> for pb::decks::deck::Kind {
fn from(k: DeckKind) -> Self {
match k {
DeckKind::Normal(n) => pb::decks::deck::Kind::Normal(n),
DeckKind::Filtered(f) => pb::decks::deck::Kind::Filtered(f),
}
}
}
impl From<pb::decks::deck::Kind> for DeckKind {
fn from(kind: pb::decks::deck::Kind) -> Self {
fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind {
match kind {
pb::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
pb::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
}
anki_proto::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
anki_proto::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
}
}
impl From<(DeckId, String)> for pb::decks::DeckNameId {
fn from(id_name: (DeckId, String)) -> Self {
pb::decks::DeckNameId {
id: id_name.0 .0,
name: id_name.1,
}
fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind {
match k {
DeckKind::Normal(n) => anki_proto::decks::deck::Kind::Normal(n),
DeckKind::Filtered(f) => anki_proto::decks::deck::Kind::Filtered(f),
}
}
impl From<Vec<(DeckId, String)>> for pb::decks::DeckNames {
fn from(id_names: Vec<(DeckId, String)>) -> Self {
pb::decks::DeckNames {
entries: id_names.into_iter().map(Into::into).collect(),
}
}
fn deck_name_to_proto((id, name): (DeckId, String)) -> anki_proto::decks::DeckNameId {
anki_proto::decks::DeckNameId { id: id.0, name }
}
// fn new_deck(&self, input: pb::generic::Bool) -> Result<pb::decks::Deck> {
// let deck = if input.val {
// Deck::new_filtered()
// } else {
// Deck::new_normal()
// };
// Ok(deck.into())
// }
fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames {
anki_proto::decks::DeckNames {
entries: names.into_iter().map(deck_name_to_proto).collect(),
}
}

View file

@ -1,14 +1,14 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::backend::backend_error::Kind;
use crate::error::AnkiError;
use crate::error::SyncErrorKind;
use crate::pb;
use crate::pb::backend::backend_error::Kind;
use crate::prelude::*;
impl AnkiError {
pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError {
pub fn into_protobuf(self, tr: &I18n) -> anki_proto::backend::BackendError {
let message = self.message(tr);
let help_page = self.help_page().map(|page| page as i32);
let context = self.context();
@ -44,7 +44,7 @@ impl AnkiError {
AnkiError::WindowsError { .. } => Kind::OsError,
};
pb::backend::BackendError {
anki_proto::backend::BackendError {
kind: kind as i32,
message,
help_page,

View file

@ -1,101 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::prelude::*;
impl From<Vec<u8>> for pb::generic::Json {
fn from(json: Vec<u8>) -> Self {
pb::generic::Json { json }
}
}
impl From<String> for pb::generic::String {
fn from(val: String) -> Self {
pb::generic::String { val }
}
}
impl From<bool> for pb::generic::Bool {
fn from(val: bool) -> Self {
pb::generic::Bool { val }
}
}
impl From<i32> for pb::generic::Int32 {
fn from(val: i32) -> Self {
pb::generic::Int32 { val }
}
}
impl From<i64> for pb::generic::Int64 {
fn from(val: i64) -> Self {
pb::generic::Int64 { val }
}
}
impl From<u32> for pb::generic::UInt32 {
fn from(val: u32) -> Self {
pb::generic::UInt32 { val }
}
}
impl From<usize> for pb::generic::UInt32 {
fn from(val: usize) -> Self {
pb::generic::UInt32 { val: val as u32 }
}
}
impl From<()> for pb::generic::Empty {
fn from(_val: ()) -> Self {
pb::generic::Empty {}
}
}
impl From<pb::cards::CardId> for CardId {
fn from(cid: pb::cards::CardId) -> Self {
CardId(cid.cid)
}
}
impl From<pb::cards::CardIds> for Vec<CardId> {
fn from(c: pb::cards::CardIds) -> Self {
c.cids.into_iter().map(CardId).collect()
}
}
impl From<pb::notes::NoteId> for NoteId {
fn from(nid: pb::notes::NoteId) -> Self {
NoteId(nid.nid)
}
}
impl From<NoteId> for pb::notes::NoteId {
fn from(nid: NoteId) -> Self {
pb::notes::NoteId { nid: nid.0 }
}
}
impl From<pb::notetypes::NotetypeId> for NotetypeId {
fn from(ntid: pb::notetypes::NotetypeId) -> Self {
NotetypeId(ntid.ntid)
}
}
impl From<NotetypeId> for pb::notetypes::NotetypeId {
fn from(ntid: NotetypeId) -> Self {
pb::notetypes::NotetypeId { ntid: ntid.0 }
}
}
impl From<pb::deckconfig::DeckConfigId> for DeckConfigId {
fn from(dcid: pb::deckconfig::DeckConfigId) -> Self {
DeckConfigId(dcid.dcid)
}
}
impl From<Vec<String>> for pb::generic::StringList {
fn from(vals: Vec<String>) -> Self {
pb::generic::StringList { vals }
}
}

View file

@ -3,21 +3,23 @@
use std::collections::HashMap;
use anki_proto::generic;
pub(super) use anki_proto::i18n::i18n_service::Service as I18nService;
use fluent::FluentArgs;
use fluent::FluentValue;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::i18n::i18n_service::Service as I18nService;
use crate::prelude::*;
use crate::scheduler::timespan::answer_button_time;
use crate::scheduler::timespan::time_span;
impl I18nService for Backend {
type Error = AnkiError;
fn translate_string(
&self,
input: pb::i18n::TranslateStringRequest,
) -> Result<pb::generic::String> {
input: anki_proto::i18n::TranslateStringRequest,
) -> Result<generic::String> {
let args = build_fluent_args(input.args);
Ok(self
@ -32,9 +34,9 @@ impl I18nService for Backend {
fn format_timespan(
&self,
input: pb::i18n::FormatTimespanRequest,
) -> Result<pb::generic::String> {
use pb::i18n::format_timespan_request::Context;
input: anki_proto::i18n::FormatTimespanRequest,
) -> Result<generic::String> {
use anki_proto::i18n::format_timespan_request::Context;
Ok(match input.context() {
Context::Precise => time_span(input.seconds, &self.tr, true),
Context::Intervals => time_span(input.seconds, &self.tr, false),
@ -43,14 +45,19 @@ impl I18nService for Backend {
.into())
}
fn i18n_resources(&self, input: pb::i18n::I18nResourcesRequest) -> Result<pb::generic::Json> {
fn i18n_resources(
&self,
input: anki_proto::i18n::I18nResourcesRequest,
) -> Result<generic::Json> {
serde_json::to_vec(&self.tr.resources_for_js(&input.modules))
.map(Into::into)
.map_err(Into::into)
}
}
fn build_fluent_args(input: HashMap<String, pb::i18n::TranslateArgValue>) -> FluentArgs<'static> {
fn build_fluent_args(
input: HashMap<String, anki_proto::i18n::TranslateArgValue>,
) -> FluentArgs<'static> {
let mut args = FluentArgs::new();
for (key, val) in input {
args.set(key, translate_arg_to_fluent_val(&val));
@ -58,8 +65,8 @@ fn build_fluent_args(input: HashMap<String, pb::i18n::TranslateArgValue>) -> Flu
args
}
fn translate_arg_to_fluent_val(arg: &pb::i18n::TranslateArgValue) -> FluentValue<'static> {
use pb::i18n::translate_arg_value::Value as V;
fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> {
use anki_proto::i18n::translate_arg_value::Value as V;
match &arg.value {
Some(val) => match val {
V::Str(s) => FluentValue::String(s.to_owned().into()),

View file

@ -1,23 +1,26 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::image_occlusion::imageocclusion_service::Service as ImageOcclusionService;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::image_occlusion::imageocclusion_service::Service as ImageOcclusionService;
use crate::prelude::*;
impl ImageOcclusionService for Backend {
type Error = AnkiError;
fn get_image_for_occlusion(
&self,
input: pb::image_occlusion::GetImageForOcclusionRequest,
) -> Result<pb::image_occlusion::GetImageForOcclusionResponse> {
input: anki_proto::image_occlusion::GetImageForOcclusionRequest,
) -> Result<anki_proto::image_occlusion::GetImageForOcclusionResponse> {
self.with_col(|col| col.get_image_for_occlusion(&input.path))
}
fn add_image_occlusion_note(
&self,
input: pb::image_occlusion::AddImageOcclusionNoteRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::image_occlusion::AddImageOcclusionNoteRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.add_image_occlusion_note(
input.notetype_id.into(),
@ -33,15 +36,15 @@ impl ImageOcclusionService for Backend {
fn get_image_occlusion_note(
&self,
input: pb::image_occlusion::GetImageOcclusionNoteRequest,
) -> Result<pb::image_occlusion::GetImageOcclusionNoteResponse> {
input: anki_proto::image_occlusion::GetImageOcclusionNoteRequest,
) -> Result<anki_proto::image_occlusion::GetImageOcclusionNoteResponse> {
self.with_col(|col| col.get_image_occlusion_note(input.note_id.into()))
}
fn update_image_occlusion_note(
&self,
input: pb::image_occlusion::UpdateImageOcclusionNoteRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.update_image_occlusion_note(
input.note_id.into(),
@ -56,8 +59,8 @@ impl ImageOcclusionService for Backend {
fn add_image_occlusion_notetype(
&self,
_input: pb::generic::Empty,
) -> Result<pb::collection::OpChanges> {
_input: generic::Empty,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.add_image_occlusion_notetype())
.map(Into::into)
}

View file

@ -3,24 +3,27 @@
use std::path::Path;
use anki_proto::generic;
use anki_proto::import_export::export_limit;
pub(super) use anki_proto::import_export::importexport_service::Service as ImportExportService;
use anki_proto::import_export::ExportLimit;
use super::progress::Progress;
use super::Backend;
use crate::import_export::package::import_colpkg;
use crate::import_export::ExportProgress;
use crate::import_export::ImportProgress;
use crate::import_export::NoteLog;
use crate::pb;
use crate::pb::import_export::export_limit;
pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService;
use crate::pb::import_export::ExportLimit;
use crate::prelude::*;
use crate::search::SearchNode;
impl ImportExportService for Backend {
type Error = AnkiError;
fn export_collection_package(
&self,
input: pb::import_export::ExportCollectionPackageRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::import_export::ExportCollectionPackageRequest,
) -> Result<generic::Empty> {
self.abort_media_sync_and_wait();
let mut guard = self.lock_open_collection()?;
@ -38,8 +41,8 @@ impl ImportExportService for Backend {
fn import_collection_package(
&self,
input: pb::import_export::ImportCollectionPackageRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::import_export::ImportCollectionPackageRequest,
) -> Result<generic::Empty> {
let _guard = self.lock_closed_collection()?;
import_colpkg(
@ -54,16 +57,16 @@ impl ImportExportService for Backend {
fn import_anki_package(
&self,
input: pb::import_export::ImportAnkiPackageRequest,
) -> Result<pb::import_export::ImportResponse> {
input: anki_proto::import_export::ImportAnkiPackageRequest,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn()))
.map(Into::into)
}
fn export_anki_package(
&self,
input: pb::import_export::ExportAnkiPackageRequest,
) -> Result<pb::generic::UInt32> {
input: anki_proto::import_export::ExportAnkiPackageRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| {
col.export_apkg(
&input.out_path,
@ -80,8 +83,8 @@ impl ImportExportService for Backend {
fn get_csv_metadata(
&self,
input: pb::import_export::CsvMetadataRequest,
) -> Result<pb::import_export::CsvMetadata> {
input: anki_proto::import_export::CsvMetadataRequest,
) -> Result<anki_proto::import_export::CsvMetadata> {
let delimiter = input.delimiter.is_some().then(|| input.delimiter());
self.with_col(|col| {
col.get_csv_metadata(
@ -96,8 +99,8 @@ impl ImportExportService for Backend {
fn import_csv(
&self,
input: pb::import_export::ImportCsvRequest,
) -> Result<pb::import_export::ImportResponse> {
input: anki_proto::import_export::ImportCsvRequest,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| {
col.import_csv(
&input.path,
@ -110,16 +113,16 @@ impl ImportExportService for Backend {
fn export_note_csv(
&self,
input: pb::import_export::ExportNoteCsvRequest,
) -> Result<pb::generic::UInt32> {
input: anki_proto::import_export::ExportNoteCsvRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| col.export_note_csv(input, self.export_progress_fn()))
.map(Into::into)
}
fn export_card_csv(
&self,
input: pb::import_export::ExportCardCsvRequest,
) -> Result<pb::generic::UInt32> {
input: anki_proto::import_export::ExportCardCsvRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| {
col.export_card_csv(
&input.out_path,
@ -133,16 +136,16 @@ impl ImportExportService for Backend {
fn import_json_file(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
input: generic::String,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn()))
.map(Into::into)
}
fn import_json_string(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
input: generic::String,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn()))
.map(Into::into)
}
@ -160,7 +163,7 @@ impl Backend {
}
}
impl From<OpOutput<NoteLog>> for pb::import_export::ImportResponse {
impl From<OpOutput<NoteLog>> for anki_proto::import_export::ImportResponse {
fn from(output: OpOutput<NoteLog>) -> Self {
Self {
changes: Some(output.changes.into()),
@ -174,7 +177,7 @@ impl From<ExportLimit> for SearchNode {
use export_limit::Limit;
let limit = export_limit
.limit
.unwrap_or(Limit::WholeCollection(pb::generic::Empty {}));
.unwrap_or(Limit::WholeCollection(generic::Empty {}));
match limit {
Limit::WholeCollection(_) => Self::WholeCollection,
Limit::DeckId(did) => Self::from_deck_id(did, true),

View file

@ -1,17 +1,20 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::links::help_page_link_request::HelpPage;
pub(super) use anki_proto::links::links_service::Service as LinksService;
use super::Backend;
use crate::pb;
use crate::pb::links::help_page_link_request::HelpPage;
pub(super) use crate::pb::links::links_service::Service as LinksService;
use crate::links::help_page_to_link;
use crate::prelude::*;
impl LinksService for Backend {
fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result<pb::generic::String> {
Ok(HelpPage::from_i32(input.page)
.unwrap_or(HelpPage::Index)
.to_link()
.into())
type Error = AnkiError;
fn help_page_link(
&self,
input: anki_proto::links::HelpPageLinkRequest,
) -> Result<anki_proto::generic::String> {
Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into())
}
}

View file

@ -1,19 +1,22 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::media::media_service::Service as MediaService;
use super::notes::to_i64s;
use super::progress::Progress;
use super::Backend;
use crate::media::check::MediaChecker;
use crate::pb;
pub(super) use crate::pb::media::media_service::Service as MediaService;
use crate::prelude::*;
impl MediaService for Backend {
type Error = AnkiError;
// media
//-----------------------------------------------
fn check_media(&self, _input: pb::generic::Empty) -> Result<pb::media::CheckMediaResponse> {
fn check_media(&self, _input: generic::Empty) -> Result<anki_proto::media::CheckMediaResponse> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -26,7 +29,7 @@ impl MediaService for Backend {
let mut report = checker.summarize_output(&mut output);
ctx.report_media_field_referencing_templates(&mut report)?;
Ok(pb::media::CheckMediaResponse {
Ok(anki_proto::media::CheckMediaResponse {
unused: output.unused,
missing: output.missing,
missing_media_notes: to_i64s(output.missing_media_notes),
@ -39,8 +42,8 @@ impl MediaService for Backend {
fn trash_media_files(
&self,
input: pb::media::TrashMediaFilesRequest,
) -> Result<pb::generic::Empty> {
input: anki_proto::media::TrashMediaFilesRequest,
) -> Result<generic::Empty> {
self.with_col(|col| {
let mgr = col.media()?;
mgr.remove_files(&input.fnames)
@ -48,7 +51,10 @@ impl MediaService for Backend {
.map(Into::into)
}
fn add_media_file(&self, input: pb::media::AddMediaFileRequest) -> Result<pb::generic::String> {
fn add_media_file(
&self,
input: anki_proto::media::AddMediaFileRequest,
) -> Result<generic::String> {
self.with_col(|col| {
let mgr = col.media()?;
Ok(mgr
@ -58,7 +64,7 @@ impl MediaService for Backend {
})
}
fn empty_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn empty_trash(&self, _input: generic::Empty) -> Result<generic::Empty> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -71,7 +77,7 @@ impl MediaService for Backend {
.map(Into::into)
}
fn restore_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn restore_trash(&self, _input: generic::Empty) -> Result<generic::Empty> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);

View file

@ -14,7 +14,6 @@ mod dbproxy;
mod deckconfig;
mod decks;
mod error;
mod generic;
mod i18n;
mod image_occlusion;
mod import_export;
@ -35,6 +34,7 @@ use std::sync::Arc;
use std::sync::Mutex;
use std::thread::JoinHandle;
use anki_proto::ServiceIndex;
use once_cell::sync::OnceCell;
use progress::AbortHandleSlot;
use prost::Message;
@ -63,8 +63,6 @@ use self::sync::SyncService;
use self::sync::SyncState;
use self::tags::TagsService;
use crate::backend::dbproxy::db_command_bytes;
use crate::pb;
use crate::pb::ServiceIndex;
use crate::prelude::*;
pub struct Backend {
@ -84,7 +82,8 @@ struct BackendState {
}
pub fn init_backend(init_msg: &[u8]) -> result::Result<Backend, String> {
let input: pb::backend::BackendInit = match pb::backend::BackendInit::decode(init_msg) {
let input: anki_proto::backend::BackendInit =
match anki_proto::backend::BackendInit::decode(init_msg) {
Ok(req) => req,
Err(_) => return Err("couldn't decode init request".into()),
};

View file

@ -3,14 +3,19 @@
use std::collections::HashSet;
pub(super) use anki_proto::notes::notes_service::Service as NotesService;
use super::Backend;
use crate::cloze::add_cloze_numbers_in_string;
use crate::pb;
pub(super) use crate::pb::notes::notes_service::Service as NotesService;
use crate::prelude::*;
impl NotesService for Backend {
fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notes::Note> {
type Error = AnkiError;
fn new_note(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::notes::Note> {
let ntid = input.into();
self.with_col(|col| {
let nt = col.get_notetype(ntid)?.or_not_found(ntid)?;
@ -18,11 +23,14 @@ impl NotesService for Backend {
})
}
fn add_note(&self, input: pb::notes::AddNoteRequest) -> Result<pb::notes::AddNoteResponse> {
fn add_note(
&self,
input: anki_proto::notes::AddNoteRequest,
) -> Result<anki_proto::notes::AddNoteResponse> {
self.with_col(|col| {
let mut note: Note = input.note.or_invalid("no note provided")?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(pb::notes::AddNoteResponse {
Ok(anki_proto::notes::AddNoteResponse {
note_id: note.id.0,
changes: Some(changes.into()),
})
@ -31,8 +39,8 @@ impl NotesService for Backend {
fn defaults_for_adding(
&self,
input: pb::notes::DefaultsForAddingRequest,
) -> Result<pb::notes::DeckAndNotetype> {
input: anki_proto::notes::DefaultsForAddingRequest,
) -> Result<anki_proto::notes::DeckAndNotetype> {
self.with_col(|col| {
let home_deck: DeckId = input.home_deck_of_current_review_card.into();
col.defaults_for_adding(home_deck).map(Into::into)
@ -41,8 +49,8 @@ impl NotesService for Backend {
fn default_deck_for_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::decks::DeckId> {
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
Ok(col
.default_deck_for_notetype(input.into())?
@ -53,8 +61,8 @@ impl NotesService for Backend {
fn update_notes(
&self,
input: pb::notes::UpdateNotesRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::notes::UpdateNotesRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let notes = input
.notes
@ -66,15 +74,15 @@ impl NotesService for Backend {
.map(Into::into)
}
fn get_note(&self, input: pb::notes::NoteId) -> Result<pb::notes::Note> {
fn get_note(&self, input: anki_proto::notes::NoteId) -> Result<anki_proto::notes::Note> {
let nid = input.into();
self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into))
}
fn remove_notes(
&self,
input: pb::notes::RemoveNotesRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::notes::RemoveNotesRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
if !input.note_ids.is_empty() {
col.remove_notes(
@ -100,21 +108,21 @@ impl NotesService for Backend {
fn cloze_numbers_in_note(
&self,
note: pb::notes::Note,
) -> Result<pb::notes::ClozeNumbersInNoteResponse> {
note: anki_proto::notes::Note,
) -> Result<anki_proto::notes::ClozeNumbersInNoteResponse> {
let mut set = HashSet::with_capacity(4);
for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set);
}
Ok(pb::notes::ClozeNumbersInNoteResponse {
Ok(anki_proto::notes::ClozeNumbersInNoteResponse {
numbers: set.into_iter().map(|n| n as u32).collect(),
})
}
fn after_note_updates(
&self,
input: pb::notes::AfterNoteUpdatesRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::notes::AfterNoteUpdatesRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.after_note_updates(
&to_note_ids(input.nids),
@ -127,32 +135,35 @@ impl NotesService for Backend {
fn field_names_for_notes(
&self,
input: pb::notes::FieldNamesForNotesRequest,
) -> Result<pb::notes::FieldNamesForNotesResponse> {
input: anki_proto::notes::FieldNamesForNotesRequest,
) -> Result<anki_proto::notes::FieldNamesForNotesResponse> {
self.with_col(|col| {
let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
col.storage
.field_names_for_notes(&nids)
.map(|fields| pb::notes::FieldNamesForNotesResponse { fields })
.map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields })
})
}
fn note_fields_check(
&self,
input: pb::notes::Note,
) -> Result<pb::notes::NoteFieldsCheckResponse> {
input: anki_proto::notes::Note,
) -> Result<anki_proto::notes::NoteFieldsCheckResponse> {
let note: Note = input.into();
self.with_col(|col| {
col.note_fields_check(&note)
.map(|r| pb::notes::NoteFieldsCheckResponse { state: r as i32 })
.map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 })
})
}
fn cards_of_note(&self, input: pb::notes::NoteId) -> Result<pb::cards::CardIds> {
fn cards_of_note(
&self,
input: anki_proto::notes::NoteId,
) -> Result<anki_proto::cards::CardIds> {
self.with_col(|col| {
col.storage
.all_card_ids_of_note_in_template_order(NoteId(input.nid))
.map(|v| pb::cards::CardIds {
.map(|v| anki_proto::cards::CardIds {
cids: v.into_iter().map(Into::into).collect(),
})
})
@ -160,8 +171,8 @@ impl NotesService for Backend {
fn get_single_notetype_of_notes(
&self,
input: pb::notes::NoteIds,
) -> Result<pb::notetypes::NotetypeId> {
input: anki_proto::notes::NoteIds,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId))
.map(Into::into)
@ -176,3 +187,15 @@ pub(super) fn to_note_ids(ids: Vec<i64>) -> Vec<NoteId> {
pub(super) fn to_i64s(ids: Vec<NoteId>) -> Vec<i64> {
ids.into_iter().map(Into::into).collect()
}
impl From<anki_proto::notes::NoteId> for NoteId {
fn from(nid: anki_proto::notes::NoteId) -> Self {
NoteId(nid.nid)
}
}
/// Wrap the domain `NoteId` newtype back into its protobuf message.
impl From<NoteId> for anki_proto::notes::NoteId {
    fn from(id: NoteId) -> Self {
        Self { nid: id.0 }
    }
}

View file

@ -1,6 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::notetypes::notetypes_service::Service as NotetypesService;
use super::Backend;
use crate::config::get_aux_notetype_config_key;
use crate::notetype::stock::get_stock_notetype;
@ -9,15 +12,15 @@ use crate::notetype::ChangeNotetypeInput;
use crate::notetype::Notetype;
use crate::notetype::NotetypeChangeInfo;
use crate::notetype::NotetypeSchema11;
use crate::pb;
pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService;
use crate::prelude::*;
impl NotetypesService for Backend {
type Error = AnkiError;
fn add_notetype(
&self,
input: pb::notetypes::Notetype,
) -> Result<pb::collection::OpChangesWithId> {
input: anki_proto::notetypes::Notetype,
) -> Result<anki_proto::collection::OpChangesWithId> {
let mut notetype: Notetype = input.into();
self.with_col(|col| {
Ok(col
@ -27,7 +30,10 @@ impl NotetypesService for Backend {
})
}
fn update_notetype(&self, input: pb::notetypes::Notetype) -> Result<pb::collection::OpChanges> {
fn update_notetype(
&self,
input: anki_proto::notetypes::Notetype,
) -> Result<anki_proto::collection::OpChanges> {
let mut notetype: Notetype = input.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
.map(Into::into)
@ -35,8 +41,8 @@ impl NotetypesService for Backend {
fn add_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChangesWithId> {
input: generic::Json,
) -> Result<anki_proto::collection::OpChangesWithId> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| {
@ -49,8 +55,8 @@ impl NotetypesService for Backend {
fn update_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChanges> {
input: generic::Json,
) -> Result<anki_proto::collection::OpChanges> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
@ -59,8 +65,8 @@ impl NotetypesService for Backend {
fn add_or_update_notetype(
&self,
input: pb::notetypes::AddOrUpdateNotetypeRequest,
) -> Result<pb::notetypes::NotetypeId> {
input: anki_proto::notetypes::AddOrUpdateNotetypeRequest,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into();
@ -74,14 +80,14 @@ impl NotetypesService for Backend {
} else {
col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?;
}
Ok(pb::notetypes::NotetypeId { ntid: nt.id.0 })
Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 })
})
}
fn get_stock_notetype_legacy(
&self,
input: pb::notetypes::StockNotetype,
) -> Result<pb::generic::Json> {
input: anki_proto::notetypes::StockNotetype,
) -> Result<generic::Json> {
let nt = get_stock_notetype(input.kind(), &self.tr);
let schema11: NotetypeSchema11 = nt.into();
serde_json::to_vec(&schema11)
@ -89,7 +95,10 @@ impl NotetypesService for Backend {
.map(Into::into)
}
fn get_notetype(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notetypes::Notetype> {
fn get_notetype(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::notetypes::Notetype> {
let ntid = input.into();
self.with_col(|col| {
col.storage
@ -99,7 +108,10 @@ impl NotetypesService for Backend {
})
}
fn get_notetype_legacy(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::Json> {
fn get_notetype_legacy(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<generic::Json> {
let ntid = input.into();
self.with_col(|col| {
let schema11: NotetypeSchema11 =
@ -110,71 +122,71 @@ impl NotetypesService for Backend {
fn get_notetype_names(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeNames> {
_input: generic::Empty,
) -> Result<anki_proto::notetypes::NotetypeNames> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_all_notetype_names()?
.into_iter()
.map(|(id, name)| pb::notetypes::NotetypeNameId { id: id.0, name })
.map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name })
.collect();
Ok(pb::notetypes::NotetypeNames { entries })
Ok(anki_proto::notetypes::NotetypeNames { entries })
})
}
fn get_notetype_names_and_counts(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeUseCounts> {
_input: generic::Empty,
) -> Result<anki_proto::notetypes::NotetypeUseCounts> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_notetype_use_counts()?
.into_iter()
.map(
|(id, name, use_count)| pb::notetypes::NotetypeNameIdUseCount {
|(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount {
id: id.0,
name,
use_count,
},
)
.collect();
Ok(pb::notetypes::NotetypeUseCounts { entries })
Ok(anki_proto::notetypes::NotetypeUseCounts { entries })
})
}
fn get_notetype_id_by_name(
&self,
input: pb::generic::String,
) -> Result<pb::notetypes::NotetypeId> {
input: generic::String,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
col.storage
.get_notetype_id(&input.val)
.and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| pb::notetypes::NotetypeId { ntid: ntid.0 })
.map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 })
})
}
fn remove_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.remove_notetype(input.into()))
.map(Into::into)
}
fn get_aux_notetype_config_key(
&self,
input: pb::notetypes::GetAuxConfigKeyRequest,
) -> Result<pb::generic::String> {
input: anki_proto::notetypes::GetAuxConfigKeyRequest,
) -> Result<generic::String> {
Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into())
}
fn get_aux_template_config_key(
&self,
input: pb::notetypes::GetAuxTemplateConfigKeyRequest,
) -> Result<pb::generic::String> {
input: anki_proto::notetypes::GetAuxTemplateConfigKeyRequest,
) -> Result<generic::String> {
self.with_col(|col| {
col.get_aux_template_config_key(
input.notetype_id.into(),
@ -187,8 +199,8 @@ impl NotetypesService for Backend {
fn get_change_notetype_info(
&self,
input: pb::notetypes::GetChangeNotetypeInfoRequest,
) -> Result<pb::notetypes::ChangeNotetypeInfo> {
input: anki_proto::notetypes::GetChangeNotetypeInfoRequest,
) -> Result<anki_proto::notetypes::ChangeNotetypeInfo> {
self.with_col(|col| {
col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into)
@ -197,20 +209,23 @@ impl NotetypesService for Backend {
fn change_notetype(
&self,
input: pb::notetypes::ChangeNotetypeRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::notetypes::ChangeNotetypeRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into))
}
fn get_field_names(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::StringList> {
fn get_field_names(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<generic::StringList> {
self.with_col(|col| col.storage.get_field_names(input.into()))
.map(Into::into)
}
fn restore_notetype_to_stock(
&self,
input: pb::notetypes::RestoreNotetypeToStockRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::notetypes::RestoreNotetypeToStockRequest,
) -> Result<anki_proto::collection::OpChanges> {
let force_kind = input.force_kind.and_then(StockKind::from_i32);
self.with_col(|col| {
col.restore_notetype_to_stock(
@ -222,8 +237,8 @@ impl NotetypesService for Backend {
}
}
impl From<pb::notetypes::Notetype> for Notetype {
fn from(n: pb::notetypes::Notetype) -> Self {
impl From<anki_proto::notetypes::Notetype> for Notetype {
fn from(n: anki_proto::notetypes::Notetype) -> Self {
Notetype {
id: n.id.into(),
name: n.name,
@ -236,9 +251,9 @@ impl From<pb::notetypes::Notetype> for Notetype {
}
}
impl From<NotetypeChangeInfo> for pb::notetypes::ChangeNotetypeInfo {
impl From<NotetypeChangeInfo> for anki_proto::notetypes::ChangeNotetypeInfo {
fn from(i: NotetypeChangeInfo) -> Self {
pb::notetypes::ChangeNotetypeInfo {
anki_proto::notetypes::ChangeNotetypeInfo {
old_notetype_name: i.old_notetype_name,
old_field_names: i.old_field_names,
old_template_names: i.old_template_names,
@ -249,8 +264,8 @@ impl From<NotetypeChangeInfo> for pb::notetypes::ChangeNotetypeInfo {
}
}
impl From<pb::notetypes::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: pb::notetypes::ChangeNotetypeRequest) -> Self {
impl From<anki_proto::notetypes::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: anki_proto::notetypes::ChangeNotetypeRequest) -> Self {
ChangeNotetypeInput {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_newtype(NoteId),
@ -278,9 +293,9 @@ impl From<pb::notetypes::ChangeNotetypeRequest> for ChangeNotetypeInput {
}
}
impl From<ChangeNotetypeInput> for pb::notetypes::ChangeNotetypeRequest {
impl From<ChangeNotetypeInput> for anki_proto::notetypes::ChangeNotetypeRequest {
fn from(i: ChangeNotetypeInput) -> Self {
pb::notetypes::ChangeNotetypeRequest {
anki_proto::notetypes::ChangeNotetypeRequest {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_iter().map(Into::into).collect(),
old_notetype_name: i.old_notetype_name,
@ -300,3 +315,15 @@ impl From<ChangeNotetypeInput> for pb::notetypes::ChangeNotetypeRequest {
}
}
}
impl From<anki_proto::notetypes::NotetypeId> for NotetypeId {
fn from(ntid: anki_proto::notetypes::NotetypeId) -> Self {
NotetypeId(ntid.ntid)
}
}
/// Wrap the domain `NotetypeId` newtype back into its protobuf message.
impl From<NotetypeId> for anki_proto::notetypes::NotetypeId {
    fn from(id: NotetypeId) -> Self {
        Self { ntid: id.0 }
    }
}

View file

@ -2,14 +2,13 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::ops::OpChanges;
use crate::pb;
use crate::prelude::*;
use crate::undo::UndoOutput;
use crate::undo::UndoStatus;
impl From<OpChanges> for pb::collection::OpChanges {
impl From<OpChanges> for anki_proto::collection::OpChanges {
fn from(c: OpChanges) -> Self {
pb::collection::OpChanges {
anki_proto::collection::OpChanges {
card: c.changes.card,
note: c.changes.note,
deck: c.changes.deck,
@ -27,8 +26,8 @@ impl From<OpChanges> for pb::collection::OpChanges {
}
impl UndoStatus {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::UndoStatus {
pb::collection::UndoStatus {
pub(crate) fn into_protobuf(self, tr: &I18n) -> anki_proto::collection::UndoStatus {
anki_proto::collection::UndoStatus {
undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(),
redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(),
last_step: self.last_step as u32,
@ -36,24 +35,24 @@ impl UndoStatus {
}
}
impl From<OpOutput<()>> for pb::collection::OpChanges {
impl From<OpOutput<()>> for anki_proto::collection::OpChanges {
fn from(o: OpOutput<()>) -> Self {
o.changes.into()
}
}
impl From<OpOutput<usize>> for pb::collection::OpChangesWithCount {
impl From<OpOutput<usize>> for anki_proto::collection::OpChangesWithCount {
fn from(out: OpOutput<usize>) -> Self {
pb::collection::OpChangesWithCount {
anki_proto::collection::OpChangesWithCount {
count: out.output as u32,
changes: Some(out.changes.into()),
}
}
}
impl From<OpOutput<i64>> for pb::collection::OpChangesWithId {
impl From<OpOutput<i64>> for anki_proto::collection::OpChangesWithId {
fn from(out: OpOutput<i64>) -> Self {
pb::collection::OpChangesWithId {
anki_proto::collection::OpChangesWithId {
id: out.output,
changes: Some(out.changes.into()),
}
@ -61,8 +60,8 @@ impl From<OpOutput<i64>> for pb::collection::OpChangesWithId {
}
impl OpOutput<UndoOutput> {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::OpChangesAfterUndo {
pb::collection::OpChangesAfterUndo {
pub(crate) fn into_protobuf(self, tr: &I18n) -> anki_proto::collection::OpChangesAfterUndo {
anki_proto::collection::OpChangesAfterUndo {
changes: Some(self.changes.into()),
operation: self.output.undone_op.describe(tr),
reverted_to_timestamp: self.output.reverted_to.0,

View file

@ -4,14 +4,13 @@
use std::sync::Arc;
use std::sync::Mutex;
use anki_i18n::I18n;
use futures::future::AbortHandle;
use super::Backend;
use crate::dbcheck::DatabaseCheckProgress;
use crate::i18n::I18n;
use crate::import_export::ExportProgress;
use crate::import_export::ImportProgress;
use crate::pb;
use crate::sync::collection::normal::NormalSyncProgress;
use crate::sync::collection::progress::FullSyncProgress;
use crate::sync::collection::progress::SyncStage;
@ -57,21 +56,24 @@ pub(super) enum Progress {
Export(ExportProgress),
}
pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::collection::Progress {
pub(super) fn progress_to_proto(
progress: Option<Progress>,
tr: &I18n,
) -> anki_proto::collection::Progress {
let progress = if let Some(progress) = progress {
match progress {
Progress::MediaSync(p) => {
pb::collection::progress::Value::MediaSync(media_sync_progress(p, tr))
anki_proto::collection::progress::Value::MediaSync(media_sync_progress(p, tr))
}
Progress::MediaCheck(n) => {
pb::collection::progress::Value::MediaCheck(tr.media_check_checked(n).into())
}
Progress::FullSync(p) => {
pb::collection::progress::Value::FullSync(pb::collection::progress::FullSync {
Progress::MediaCheck(n) => anki_proto::collection::progress::Value::MediaCheck(
tr.media_check_checked(n).into(),
),
Progress::FullSync(p) => anki_proto::collection::progress::Value::FullSync(
anki_proto::collection::progress::FullSync {
transferred: p.transferred_bytes as u32,
total: p.total_bytes as u32,
})
}
},
),
Progress::NormalSync(p) => {
let stage = match p.stage {
SyncStage::Connecting => tr.sync_syncing(),
@ -85,11 +87,13 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::co
let removed = tr
.sync_media_removed_count(p.local_remove, p.remote_remove)
.into();
pb::collection::progress::Value::NormalSync(pb::collection::progress::NormalSync {
anki_proto::collection::progress::Value::NormalSync(
anki_proto::collection::progress::NormalSync {
stage,
added,
removed,
})
},
)
}
Progress::DatabaseCheck(p) => {
let mut stage_total = 0;
@ -106,15 +110,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::co
DatabaseCheckProgress::History => tr.database_check_checking_history(),
}
.to_string();
pb::collection::progress::Value::DatabaseCheck(
pb::collection::progress::DatabaseCheck {
anki_proto::collection::progress::Value::DatabaseCheck(
anki_proto::collection::progress::DatabaseCheck {
stage,
stage_total,
stage_current,
},
)
}
Progress::Import(progress) => pb::collection::progress::Value::Importing(
Progress::Import(progress) => anki_proto::collection::progress::Value::Importing(
match progress {
ImportProgress::File => tr.importing_importing_file(),
ImportProgress::Media(n) => tr.importing_processed_media_file(n),
@ -125,7 +129,7 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::co
}
.into(),
),
Progress::Export(progress) => pb::collection::progress::Value::Exporting(
Progress::Export(progress) => anki_proto::collection::progress::Value::Exporting(
match progress {
ExportProgress::File => tr.exporting_exporting_file(),
ExportProgress::Media(n) => tr.exporting_processed_media_files(n),
@ -137,15 +141,18 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::co
),
}
} else {
pb::collection::progress::Value::None(pb::generic::Empty {})
anki_proto::collection::progress::Value::None(anki_proto::generic::Empty {})
};
pb::collection::Progress {
anki_proto::collection::Progress {
value: Some(progress),
}
}
fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::collection::progress::MediaSync {
pb::collection::progress::MediaSync {
fn media_sync_progress(
p: MediaSyncProgress,
tr: &I18n,
) -> anki_proto::collection::progress::MediaSync {
anki_proto::collection::progress::MediaSync {
checked: tr.sync_media_checked_count(p.checked).into(),
added: tr
.sync_media_added_count(p.uploaded_files, p.downloaded_files)

View file

@ -3,15 +3,14 @@
use std::mem;
use crate::pb;
use crate::prelude::*;
use crate::scheduler::answering::CardAnswer;
use crate::scheduler::answering::Rating;
use crate::scheduler::queue::QueuedCard;
use crate::scheduler::queue::QueuedCards;
impl From<pb::scheduler::CardAnswer> for CardAnswer {
fn from(mut answer: pb::scheduler::CardAnswer) -> Self {
impl From<anki_proto::scheduler::CardAnswer> for CardAnswer {
fn from(mut answer: anki_proto::scheduler::CardAnswer) -> Self {
let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default();
let custom_data = mem::take(&mut new_state.custom_data);
CardAnswer {
@ -26,18 +25,18 @@ impl From<pb::scheduler::CardAnswer> for CardAnswer {
}
}
impl From<pb::scheduler::card_answer::Rating> for Rating {
fn from(rating: pb::scheduler::card_answer::Rating) -> Self {
impl From<anki_proto::scheduler::card_answer::Rating> for Rating {
fn from(rating: anki_proto::scheduler::card_answer::Rating) -> Self {
match rating {
pb::scheduler::card_answer::Rating::Again => Rating::Again,
pb::scheduler::card_answer::Rating::Hard => Rating::Hard,
pb::scheduler::card_answer::Rating::Good => Rating::Good,
pb::scheduler::card_answer::Rating::Easy => Rating::Easy,
anki_proto::scheduler::card_answer::Rating::Again => Rating::Again,
anki_proto::scheduler::card_answer::Rating::Hard => Rating::Hard,
anki_proto::scheduler::card_answer::Rating::Good => Rating::Good,
anki_proto::scheduler::card_answer::Rating::Easy => Rating::Easy,
}
}
}
impl From<QueuedCard> for pb::scheduler::queued_cards::QueuedCard {
impl From<QueuedCard> for anki_proto::scheduler::queued_cards::QueuedCard {
fn from(queued_card: QueuedCard) -> Self {
Self {
card: Some(queued_card.card.into()),
@ -45,20 +44,20 @@ impl From<QueuedCard> for pb::scheduler::queued_cards::QueuedCard {
context: Some(queued_card.context),
queue: match queued_card.kind {
crate::scheduler::queue::QueueEntryKind::New => {
pb::scheduler::queued_cards::Queue::New
anki_proto::scheduler::queued_cards::Queue::New
}
crate::scheduler::queue::QueueEntryKind::Review => {
pb::scheduler::queued_cards::Queue::Review
anki_proto::scheduler::queued_cards::Queue::Review
}
crate::scheduler::queue::QueueEntryKind::Learning => {
pb::scheduler::queued_cards::Queue::Learning
anki_proto::scheduler::queued_cards::Queue::Learning
}
} as i32,
}
}
}
impl From<QueuedCards> for pb::scheduler::QueuedCards {
impl From<QueuedCards> for anki_proto::scheduler::QueuedCards {
fn from(queued_cards: QueuedCards) -> Self {
Self {
cards: queued_cards.cards.into_iter().map(Into::into).collect(),

View file

@ -4,9 +4,11 @@
mod answering;
mod states;
use anki_proto::generic;
use anki_proto::scheduler;
pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService;
use crate::prelude::*;
use crate::scheduler::new::NewCardDueOrder;
use crate::scheduler::states::CardState;
@ -14,12 +16,14 @@ use crate::scheduler::states::SchedulingStates;
use crate::stats::studied_today;
impl SchedulerService for Backend {
type Error = AnkiError;
/// This behaves like _updateCutoff() in older code - it also unburies at
/// the start of a new day.
fn sched_timing_today(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::SchedTimingTodayResponse> {
_input: generic::Empty,
) -> Result<scheduler::SchedTimingTodayResponse> {
self.with_col(|col| {
let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?;
@ -28,19 +32,19 @@ impl SchedulerService for Backend {
}
/// Fetch data from DB and return rendered string.
fn studied_today(&self, _input: pb::generic::Empty) -> Result<pb::generic::String> {
fn studied_today(&self, _input: generic::Empty) -> Result<generic::String> {
self.with_col(|col| col.studied_today().map(Into::into))
}
/// Message rendering only, for old graphs.
fn studied_today_message(
&self,
input: pb::scheduler::StudiedTodayMessageRequest,
) -> Result<pb::generic::String> {
input: scheduler::StudiedTodayMessageRequest,
) -> Result<generic::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
}
fn update_stats(&self, input: pb::scheduler::UpdateStatsRequest) -> Result<pb::generic::Empty> {
fn update_stats(&self, input: scheduler::UpdateStatsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@ -50,10 +54,7 @@ impl SchedulerService for Backend {
})
}
fn extend_limits(
&self,
input: pb::scheduler::ExtendLimitsRequest,
) -> Result<pb::generic::Empty> {
fn extend_limits(&self, input: scheduler::ExtendLimitsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@ -72,30 +73,27 @@ impl SchedulerService for Backend {
fn counts_for_deck_today(
&self,
input: pb::decks::DeckId,
) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
input: anki_proto::decks::DeckId,
) -> Result<scheduler::CountsForDeckTodayResponse> {
self.with_col(|col| col.counts_for_deck_today(input.did.into()))
}
fn congrats_info(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::CongratsInfoResponse> {
fn congrats_info(&self, _input: generic::Empty) -> Result<scheduler::CongratsInfoResponse> {
self.with_col(|col| col.congrats_info())
}
fn restore_buried_and_suspended_cards(
&self,
input: pb::cards::CardIds,
) -> Result<pb::collection::OpChanges> {
let cids: Vec<_> = input.into();
input: anki_proto::cards::CardIds,
) -> Result<anki_proto::collection::OpChanges> {
let cids: Vec<_> = input.cids.into_iter().map(CardId).collect();
self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into))
}
fn unbury_deck(
&self,
input: pb::scheduler::UnburyDeckRequest,
) -> Result<pb::collection::OpChanges> {
input: scheduler::UnburyDeckRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.unbury_deck(input.deck_id.into(), input.mode())
.map(Into::into)
@ -104,8 +102,8 @@ impl SchedulerService for Backend {
fn bury_or_suspend_cards(
&self,
input: pb::scheduler::BuryOrSuspendCardsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: scheduler::BuryOrSuspendCardsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
let mode = input.mode();
let cids = if input.card_ids.is_empty() {
@ -118,21 +116,24 @@ impl SchedulerService for Backend {
})
}
fn empty_filtered_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
fn empty_filtered_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into))
}
fn rebuild_filtered_deck(
&self,
input: pb::decks::DeckId,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
}
fn schedule_cards_as_new(
&self,
input: pb::scheduler::ScheduleCardsAsNewRequest,
) -> Result<pb::collection::OpChanges> {
input: scheduler::ScheduleCardsAsNewRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let cids = input.card_ids.into_newtype(CardId);
col.reschedule_cards_as_new(
@ -142,7 +143,7 @@ impl SchedulerService for Backend {
input.reset_counts,
input
.context
.and_then(pb::scheduler::schedule_cards_as_new_request::Context::from_i32),
.and_then(scheduler::schedule_cards_as_new_request::Context::from_i32),
)
.map(Into::into)
})
@ -150,15 +151,15 @@ impl SchedulerService for Backend {
fn schedule_cards_as_new_defaults(
&self,
input: pb::scheduler::ScheduleCardsAsNewDefaultsRequest,
) -> Result<pb::scheduler::ScheduleCardsAsNewDefaultsResponse> {
input: scheduler::ScheduleCardsAsNewDefaultsRequest,
) -> Result<scheduler::ScheduleCardsAsNewDefaultsResponse> {
self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context())))
}
fn set_due_date(
&self,
input: pb::scheduler::SetDueDateRequest,
) -> Result<pb::collection::OpChanges> {
input: scheduler::SetDueDateRequest,
) -> Result<anki_proto::collection::OpChanges> {
let config = input.config_key.map(|v| v.key().into());
let days = input.days;
let cids = input.card_ids.into_newtype(CardId);
@ -167,8 +168,8 @@ impl SchedulerService for Backend {
fn sort_cards(
&self,
input: pb::scheduler::SortCardsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: scheduler::SortCardsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let cids = input.card_ids.into_newtype(CardId);
let (start, step, random, shift) = (
input.starting_from,
@ -189,15 +190,15 @@ impl SchedulerService for Backend {
fn reposition_defaults(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::RepositionDefaultsResponse> {
_input: generic::Empty,
) -> Result<scheduler::RepositionDefaultsResponse> {
self.with_col(|col| Ok(col.reposition_defaults()))
}
fn sort_deck(
&self,
input: pb::scheduler::SortDeckRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: scheduler::SortDeckRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.sort_deck_legacy(input.deck_id.into(), input.randomize)
.map(Into::into)
@ -206,8 +207,8 @@ impl SchedulerService for Backend {
fn get_scheduling_states(
&self,
input: pb::cards::CardId,
) -> Result<pb::scheduler::SchedulingStates> {
input: anki_proto::cards::CardId,
) -> Result<scheduler::SchedulingStates> {
let cid: CardId = input.into();
self.with_col(|col| col.get_scheduling_states(cid))
.map(Into::into)
@ -215,32 +216,35 @@ impl SchedulerService for Backend {
fn describe_next_states(
&self,
input: pb::scheduler::SchedulingStates,
) -> Result<pb::generic::StringList> {
input: scheduler::SchedulingStates,
) -> Result<generic::StringList> {
let states: SchedulingStates = input.into();
self.with_col(|col| col.describe_next_states(states))
.map(Into::into)
}
fn state_is_leech(&self, input: pb::scheduler::SchedulingState) -> Result<pb::generic::Bool> {
fn state_is_leech(&self, input: scheduler::SchedulingState) -> Result<generic::Bool> {
let state: CardState = input.into();
Ok(state.leeched().into())
}
fn answer_card(&self, input: pb::scheduler::CardAnswer) -> Result<pb::collection::OpChanges> {
fn answer_card(
&self,
input: scheduler::CardAnswer,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.answer_card(&mut input.into()))
.map(Into::into)
}
fn upgrade_scheduler(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn upgrade_scheduler(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler()))
.map(Into::into)
}
fn get_queued_cards(
&self,
input: pb::scheduler::GetQueuedCardsRequest,
) -> Result<pb::scheduler::QueuedCards> {
input: scheduler::GetQueuedCardsRequest,
) -> Result<scheduler::QueuedCards> {
self.with_col(|col| {
col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
.map(Into::into)
@ -249,26 +253,15 @@ impl SchedulerService for Backend {
fn custom_study(
&self,
input: pb::scheduler::CustomStudyRequest,
) -> Result<pb::collection::OpChanges> {
input: scheduler::CustomStudyRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.custom_study(input)).map(Into::into)
}
fn custom_study_defaults(
&self,
input: pb::scheduler::CustomStudyDefaultsRequest,
) -> Result<pb::scheduler::CustomStudyDefaultsResponse> {
input: scheduler::CustomStudyDefaultsRequest,
) -> Result<scheduler::CustomStudyDefaultsResponse> {
self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
}
}
impl From<crate::scheduler::timing::SchedTimingToday> for pb::scheduler::SchedTimingTodayResponse {
fn from(
t: crate::scheduler::timing::SchedTimingToday,
) -> pb::scheduler::SchedTimingTodayResponse {
pb::scheduler::SchedTimingTodayResponse {
days_elapsed: t.days_elapsed,
next_day_at: t.next_day_at.0,
}
}
}

View file

@ -1,33 +1,34 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::FilteredState;
impl From<FilteredState> for pb::scheduler::scheduling_state::Filtered {
impl From<FilteredState> for anki_proto::scheduler::scheduling_state::Filtered {
fn from(state: FilteredState) -> Self {
pb::scheduler::scheduling_state::Filtered {
anki_proto::scheduler::scheduling_state::Filtered {
value: Some(match state {
FilteredState::Preview(state) => {
pb::scheduler::scheduling_state::filtered::Value::Preview(state.into())
anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state.into())
}
FilteredState::Rescheduling(state) => {
pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state.into())
anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(
state.into(),
)
}
}),
}
}
}
impl From<pb::scheduler::scheduling_state::Filtered> for FilteredState {
fn from(state: pb::scheduler::scheduling_state::Filtered) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Filtered> for FilteredState {
fn from(state: anki_proto::scheduler::scheduling_state::Filtered) -> Self {
match state.value.unwrap_or_else(|| {
pb::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
anki_proto::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
}) {
pb::scheduler::scheduling_state::filtered::Value::Preview(state) => {
anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state) => {
FilteredState::Preview(state.into())
}
pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
FilteredState::Rescheduling(state.into())
}
}

View file

@ -1,11 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::LearnState;
impl From<pb::scheduler::scheduling_state::Learning> for LearnState {
fn from(state: pb::scheduler::scheduling_state::Learning) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Learning> for LearnState {
fn from(state: anki_proto::scheduler::scheduling_state::Learning) -> Self {
LearnState {
remaining_steps: state.remaining_steps,
scheduled_secs: state.scheduled_secs,
@ -13,9 +12,9 @@ impl From<pb::scheduler::scheduling_state::Learning> for LearnState {
}
}
impl From<LearnState> for pb::scheduler::scheduling_state::Learning {
impl From<LearnState> for anki_proto::scheduler::scheduling_state::Learning {
fn from(state: LearnState) -> Self {
pb::scheduler::scheduling_state::Learning {
anki_proto::scheduler::scheduling_state::Learning {
remaining_steps: state.remaining_steps,
scheduled_secs: state.scheduled_secs,
}

View file

@ -10,15 +10,14 @@ mod relearning;
mod rescheduling;
mod review;
use crate::pb;
use crate::scheduler::states::CardState;
use crate::scheduler::states::NewState;
use crate::scheduler::states::NormalState;
use crate::scheduler::states::SchedulingStates;
impl From<SchedulingStates> for pb::scheduler::SchedulingStates {
impl From<SchedulingStates> for anki_proto::scheduler::SchedulingStates {
fn from(choices: SchedulingStates) -> Self {
pb::scheduler::SchedulingStates {
anki_proto::scheduler::SchedulingStates {
current: Some(choices.current.into()),
again: Some(choices.again.into()),
hard: Some(choices.hard.into()),
@ -28,8 +27,8 @@ impl From<SchedulingStates> for pb::scheduler::SchedulingStates {
}
}
impl From<pb::scheduler::SchedulingStates> for SchedulingStates {
fn from(choices: pb::scheduler::SchedulingStates) -> Self {
impl From<anki_proto::scheduler::SchedulingStates> for SchedulingStates {
fn from(choices: anki_proto::scheduler::SchedulingStates) -> Self {
SchedulingStates {
current: choices.current.unwrap_or_default().into(),
again: choices.again.unwrap_or_default().into(),
@ -40,15 +39,15 @@ impl From<pb::scheduler::SchedulingStates> for SchedulingStates {
}
}
impl From<CardState> for pb::scheduler::SchedulingState {
impl From<CardState> for anki_proto::scheduler::SchedulingState {
fn from(state: CardState) -> Self {
pb::scheduler::SchedulingState {
anki_proto::scheduler::SchedulingState {
value: Some(match state {
CardState::Normal(state) => {
pb::scheduler::scheduling_state::Value::Normal(state.into())
anki_proto::scheduler::scheduling_state::Value::Normal(state.into())
}
CardState::Filtered(state) => {
pb::scheduler::scheduling_state::Value::Filtered(state.into())
anki_proto::scheduler::scheduling_state::Value::Filtered(state.into())
}
}),
custom_data: None,
@ -56,14 +55,14 @@ impl From<CardState> for pb::scheduler::SchedulingState {
}
}
impl From<pb::scheduler::SchedulingState> for CardState {
fn from(state: pb::scheduler::SchedulingState) -> Self {
impl From<anki_proto::scheduler::SchedulingState> for CardState {
fn from(state: anki_proto::scheduler::SchedulingState) -> Self {
if let Some(value) = state.value {
match value {
pb::scheduler::scheduling_state::Value::Normal(normal) => {
anki_proto::scheduler::scheduling_state::Value::Normal(normal) => {
CardState::Normal(normal.into())
}
pb::scheduler::scheduling_state::Value::Filtered(filtered) => {
anki_proto::scheduler::scheduling_state::Value::Filtered(filtered) => {
CardState::Filtered(filtered.into())
}
}

View file

@ -1,20 +1,19 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::NewState;
impl From<pb::scheduler::scheduling_state::New> for NewState {
fn from(state: pb::scheduler::scheduling_state::New) -> Self {
impl From<anki_proto::scheduler::scheduling_state::New> for NewState {
fn from(state: anki_proto::scheduler::scheduling_state::New) -> Self {
NewState {
position: state.position,
}
}
}
impl From<NewState> for pb::scheduler::scheduling_state::New {
impl From<NewState> for anki_proto::scheduler::scheduling_state::New {
fn from(state: NewState) -> Self {
pb::scheduler::scheduling_state::New {
anki_proto::scheduler::scheduling_state::New {
position: state.position,
}
}

View file

@ -1,45 +1,44 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::NormalState;
impl From<NormalState> for pb::scheduler::scheduling_state::Normal {
impl From<NormalState> for anki_proto::scheduler::scheduling_state::Normal {
fn from(state: NormalState) -> Self {
pb::scheduler::scheduling_state::Normal {
anki_proto::scheduler::scheduling_state::Normal {
value: Some(match state {
NormalState::New(state) => {
pb::scheduler::scheduling_state::normal::Value::New(state.into())
anki_proto::scheduler::scheduling_state::normal::Value::New(state.into())
}
NormalState::Learning(state) => {
pb::scheduler::scheduling_state::normal::Value::Learning(state.into())
anki_proto::scheduler::scheduling_state::normal::Value::Learning(state.into())
}
NormalState::Review(state) => {
pb::scheduler::scheduling_state::normal::Value::Review(state.into())
anki_proto::scheduler::scheduling_state::normal::Value::Review(state.into())
}
NormalState::Relearning(state) => {
pb::scheduler::scheduling_state::normal::Value::Relearning(state.into())
anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state.into())
}
}),
}
}
}
impl From<pb::scheduler::scheduling_state::Normal> for NormalState {
fn from(state: pb::scheduler::scheduling_state::Normal) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Normal> for NormalState {
fn from(state: anki_proto::scheduler::scheduling_state::Normal) -> Self {
match state.value.unwrap_or_else(|| {
pb::scheduler::scheduling_state::normal::Value::New(Default::default())
anki_proto::scheduler::scheduling_state::normal::Value::New(Default::default())
}) {
pb::scheduler::scheduling_state::normal::Value::New(state) => {
anki_proto::scheduler::scheduling_state::normal::Value::New(state) => {
NormalState::New(state.into())
}
pb::scheduler::scheduling_state::normal::Value::Learning(state) => {
anki_proto::scheduler::scheduling_state::normal::Value::Learning(state) => {
NormalState::Learning(state.into())
}
pb::scheduler::scheduling_state::normal::Value::Review(state) => {
anki_proto::scheduler::scheduling_state::normal::Value::Review(state) => {
NormalState::Review(state.into())
}
pb::scheduler::scheduling_state::normal::Value::Relearning(state) => {
anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state) => {
NormalState::Relearning(state.into())
}
}

View file

@ -1,11 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::PreviewState;
impl From<pb::scheduler::scheduling_state::Preview> for PreviewState {
fn from(state: pb::scheduler::scheduling_state::Preview) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Preview> for PreviewState {
fn from(state: anki_proto::scheduler::scheduling_state::Preview) -> Self {
PreviewState {
scheduled_secs: state.scheduled_secs,
finished: state.finished,
@ -13,9 +12,9 @@ impl From<pb::scheduler::scheduling_state::Preview> for PreviewState {
}
}
impl From<PreviewState> for pb::scheduler::scheduling_state::Preview {
impl From<PreviewState> for anki_proto::scheduler::scheduling_state::Preview {
fn from(state: PreviewState) -> Self {
pb::scheduler::scheduling_state::Preview {
anki_proto::scheduler::scheduling_state::Preview {
scheduled_secs: state.scheduled_secs,
finished: state.finished,
}

View file

@ -1,11 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::RelearnState;
impl From<pb::scheduler::scheduling_state::Relearning> for RelearnState {
fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Relearning> for RelearnState {
fn from(state: anki_proto::scheduler::scheduling_state::Relearning) -> Self {
RelearnState {
review: state.review.unwrap_or_default().into(),
learning: state.learning.unwrap_or_default().into(),
@ -13,9 +12,9 @@ impl From<pb::scheduler::scheduling_state::Relearning> for RelearnState {
}
}
impl From<RelearnState> for pb::scheduler::scheduling_state::Relearning {
impl From<RelearnState> for anki_proto::scheduler::scheduling_state::Relearning {
fn from(state: RelearnState) -> Self {
pb::scheduler::scheduling_state::Relearning {
anki_proto::scheduler::scheduling_state::Relearning {
review: Some(state.review.into()),
learning: Some(state.learning.into()),
}

View file

@ -1,20 +1,19 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::ReschedulingFilterState;
impl From<pb::scheduler::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self {
impl From<anki_proto::scheduler::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
fn from(state: anki_proto::scheduler::scheduling_state::ReschedulingFilter) -> Self {
ReschedulingFilterState {
original_state: state.original_state.unwrap_or_default().into(),
}
}
}
impl From<ReschedulingFilterState> for pb::scheduler::scheduling_state::ReschedulingFilter {
impl From<ReschedulingFilterState> for anki_proto::scheduler::scheduling_state::ReschedulingFilter {
fn from(state: ReschedulingFilterState) -> Self {
pb::scheduler::scheduling_state::ReschedulingFilter {
anki_proto::scheduler::scheduling_state::ReschedulingFilter {
original_state: Some(state.original_state.into()),
}
}

View file

@ -1,11 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb;
use crate::scheduler::states::ReviewState;
impl From<pb::scheduler::scheduling_state::Review> for ReviewState {
fn from(state: pb::scheduler::scheduling_state::Review) -> Self {
impl From<anki_proto::scheduler::scheduling_state::Review> for ReviewState {
fn from(state: anki_proto::scheduler::scheduling_state::Review) -> Self {
ReviewState {
scheduled_days: state.scheduled_days,
elapsed_days: state.elapsed_days,
@ -16,9 +15,9 @@ impl From<pb::scheduler::scheduling_state::Review> for ReviewState {
}
}
impl From<ReviewState> for pb::scheduler::scheduling_state::Review {
impl From<ReviewState> for anki_proto::scheduler::scheduling_state::Review {
fn from(state: ReviewState) -> Self {
pb::scheduler::scheduling_state::Review {
anki_proto::scheduler::scheduling_state::Review {
scheduled_days: state.scheduled_days,
elapsed_days: state.elapsed_days,
ease_factor: state.ease_factor,

View file

@ -3,13 +3,13 @@
use std::str::FromStr;
use anki_i18n::I18n;
use crate::browser_table;
use crate::i18n::I18n;
use crate::pb;
impl browser_table::Column {
pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column {
pb::search::browser_columns::Column {
pub fn to_pb_column(self, i18n: &I18n) -> anki_proto::search::browser_columns::Column {
anki_proto::search::browser_columns::Column {
key: self.to_string(),
cards_mode_label: self.cards_mode_label(i18n),
notes_mode_label: self.notes_mode_label(i18n),
@ -22,12 +22,11 @@ impl browser_table::Column {
}
}
impl From<pb::generic::StringList> for Vec<browser_table::Column> {
fn from(input: pb::generic::StringList) -> Self {
input
.vals
.iter()
.map(|c| browser_table::Column::from_str(c).unwrap_or_default())
pub(crate) fn string_list_to_browser_columns(
list: anki_proto::generic::StringList,
) -> Vec<browser_table::Column> {
list.vals
.into_iter()
.map(|c| browser_table::Column::from_str(&c).unwrap_or_default())
.collect()
}
}

View file

@ -7,12 +7,14 @@ mod search_node;
use std::str::FromStr;
use std::sync::Arc;
use anki_proto::generic;
pub(super) use anki_proto::search::search_service::Service as SearchService;
use anki_proto::search::sort_order::Value as SortOrderProto;
use super::notes::to_note_ids;
use super::Backend;
use crate::backend::search::browser_table::string_list_to_browser_columns;
use crate::browser_table::Column;
use crate::pb;
pub(super) use crate::pb::search::search_service::Service as SearchService;
use crate::pb::search::sort_order::Value as SortOrderProto;
use crate::prelude::*;
use crate::search::replace_search_node;
use crate::search::JoinSearches;
@ -20,26 +22,37 @@ use crate::search::Node;
use crate::search::SortMode;
impl SearchService for Backend {
fn build_search_string(&self, input: pb::search::SearchNode) -> Result<pb::generic::String> {
type Error = AnkiError;
fn build_search_string(
&self,
input: anki_proto::search::SearchNode,
) -> Result<generic::String> {
let node: Node = input.try_into()?;
Ok(SearchBuilder::from_root(node).write().into())
}
fn search_cards(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
fn search_cards(
&self,
input: anki_proto::search::SearchRequest,
) -> Result<anki_proto::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let cids = col.search_cards(&input.search, order)?;
Ok(pb::search::SearchResponse {
Ok(anki_proto::search::SearchResponse {
ids: cids.into_iter().map(|v| v.0).collect(),
})
})
}
fn search_notes(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
fn search_notes(
&self,
input: anki_proto::search::SearchRequest,
) -> Result<anki_proto::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let nids = col.search_notes(&input.search, order)?;
Ok(pb::search::SearchResponse {
Ok(anki_proto::search::SearchResponse {
ids: nids.into_iter().map(|v| v.0).collect(),
})
})
@ -47,15 +60,21 @@ impl SearchService for Backend {
fn join_search_nodes(
&self,
input: pb::search::JoinSearchNodesRequest,
) -> Result<pb::generic::String> {
input: anki_proto::search::JoinSearchNodesRequest,
) -> Result<generic::String> {
let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?;
let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?;
Ok(
match pb::search::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
pb::search::search_node::group::Joiner::And => existing_node.and_flat(additional_node),
pb::search::search_node::group::Joiner::Or => existing_node.or_flat(additional_node),
match anki_proto::search::search_node::group::Joiner::from_i32(input.joiner)
.unwrap_or_default()
{
anki_proto::search::search_node::group::Joiner::And => {
existing_node.and_flat(additional_node)
}
anki_proto::search::search_node::group::Joiner::Or => {
existing_node.or_flat(additional_node)
}
}
.write()
.into(),
@ -64,8 +83,8 @@ impl SearchService for Backend {
fn replace_search_node(
&self,
input: pb::search::ReplaceSearchNodeRequest,
) -> Result<pb::generic::String> {
input: anki_proto::search::ReplaceSearchNodeRequest,
) -> Result<generic::String> {
let existing = {
let node = input.existing_node.unwrap_or_default().try_into()?;
if let Node::Group(nodes) = node {
@ -80,8 +99,8 @@ impl SearchService for Backend {
fn find_and_replace(
&self,
input: pb::search::FindAndReplaceRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::search::FindAndReplaceRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let mut search = if input.regex {
input.search
} else {
@ -108,31 +127,29 @@ impl SearchService for Backend {
fn all_browser_columns(
&self,
_input: pb::generic::Empty,
) -> Result<pb::search::BrowserColumns> {
_input: generic::Empty,
) -> Result<anki_proto::search::BrowserColumns> {
self.with_col(|col| Ok(col.all_browser_columns()))
}
fn set_active_browser_columns(
&self,
input: pb::generic::StringList,
) -> Result<pb::generic::Empty> {
fn set_active_browser_columns(&self, input: generic::StringList) -> Result<generic::Empty> {
self.with_col(|col| {
col.state.active_browser_columns = Some(Arc::new(input.into()));
col.state.active_browser_columns =
Some(Arc::new(string_list_to_browser_columns(input)));
Ok(())
})
.map(Into::into)
}
fn browser_row_for_id(&self, input: pb::generic::Int64) -> Result<pb::search::BrowserRow> {
fn browser_row_for_id(&self, input: generic::Int64) -> Result<anki_proto::search::BrowserRow> {
self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into))
}
}
impl From<Option<SortOrderProto>> for SortMode {
fn from(order: Option<SortOrderProto>) -> Self {
use pb::search::sort_order::Value as V;
match order.unwrap_or(V::None(pb::generic::Empty {})) {
use anki_proto::search::sort_order::Value as V;
match order.unwrap_or(V::None(generic::Empty {})) {
V::None(_) => SortMode::NoOrder,
V::Custom(s) => SortMode::Custom(s),
V::Builtin(b) => SortMode::Builtin {

View file

@ -1,9 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::search::search_node::IdList;
use itertools::Itertools;
use crate::pb;
use crate::prelude::*;
use crate::search::parse_search;
use crate::search::Negated;
@ -16,13 +16,13 @@ use crate::search::TemplateKind;
use crate::text::escape_anki_wildcards;
use crate::text::escape_anki_wildcards_for_search_node;
impl TryFrom<pb::search::SearchNode> for Node {
impl TryFrom<anki_proto::search::SearchNode> for Node {
type Error = AnkiError;
fn try_from(msg: pb::search::SearchNode) -> std::result::Result<Self, Self::Error> {
use pb::search::search_node::group::Joiner;
use pb::search::search_node::Filter;
use pb::search::search_node::Flag;
fn try_from(msg: anki_proto::search::SearchNode) -> std::result::Result<Self, Self::Error> {
use anki_proto::search::search_node::group::Joiner;
use anki_proto::search::search_node::Filter;
use anki_proto::search::search_node::Flag;
Ok(if let Some(filter) = msg.filter {
match filter {
Filter::Tag(s) => SearchNode::from_tag_name(&s).into(),
@ -32,7 +32,7 @@ impl TryFrom<pb::search::SearchNode> for Node {
Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
}
Filter::Nid(nid) => Node::Search(SearchNode::NoteIds(nid.to_string())),
Filter::Nids(nids) => Node::Search(SearchNode::NoteIds(nids.into_id_string())),
Filter::Nids(nids) => Node::Search(SearchNode::NoteIds(id_list_to_string(nids))),
Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
notetype_id: dupe.notetype_id.into(),
text: dupe.first_field,
@ -58,7 +58,7 @@ impl TryFrom<pb::search::SearchNode> for Node {
}),
Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
Filter::CardState(state) => Node::Search(SearchNode::State(
pb::search::search_node::CardState::from_i32(state)
anki_proto::search::search_node::CardState::from_i32(state)
.unwrap_or_default()
.into(),
)),
@ -120,38 +120,36 @@ impl TryFrom<pb::search::SearchNode> for Node {
}
}
impl From<pb::search::search_node::Rating> for RatingKind {
fn from(r: pb::search::search_node::Rating) -> Self {
impl From<anki_proto::search::search_node::Rating> for RatingKind {
fn from(r: anki_proto::search::search_node::Rating) -> Self {
match r {
pb::search::search_node::Rating::Again => RatingKind::AnswerButton(1),
pb::search::search_node::Rating::Hard => RatingKind::AnswerButton(2),
pb::search::search_node::Rating::Good => RatingKind::AnswerButton(3),
pb::search::search_node::Rating::Easy => RatingKind::AnswerButton(4),
pb::search::search_node::Rating::Any => RatingKind::AnyAnswerButton,
pb::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
anki_proto::search::search_node::Rating::Again => RatingKind::AnswerButton(1),
anki_proto::search::search_node::Rating::Hard => RatingKind::AnswerButton(2),
anki_proto::search::search_node::Rating::Good => RatingKind::AnswerButton(3),
anki_proto::search::search_node::Rating::Easy => RatingKind::AnswerButton(4),
anki_proto::search::search_node::Rating::Any => RatingKind::AnyAnswerButton,
anki_proto::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
}
}
}
impl From<pb::search::search_node::CardState> for StateKind {
fn from(k: pb::search::search_node::CardState) -> Self {
impl From<anki_proto::search::search_node::CardState> for StateKind {
fn from(k: anki_proto::search::search_node::CardState) -> Self {
match k {
pb::search::search_node::CardState::New => StateKind::New,
pb::search::search_node::CardState::Learn => StateKind::Learning,
pb::search::search_node::CardState::Review => StateKind::Review,
pb::search::search_node::CardState::Due => StateKind::Due,
pb::search::search_node::CardState::Suspended => StateKind::Suspended,
pb::search::search_node::CardState::Buried => StateKind::Buried,
anki_proto::search::search_node::CardState::New => StateKind::New,
anki_proto::search::search_node::CardState::Learn => StateKind::Learning,
anki_proto::search::search_node::CardState::Review => StateKind::Review,
anki_proto::search::search_node::CardState::Due => StateKind::Due,
anki_proto::search::search_node::CardState::Suspended => StateKind::Suspended,
anki_proto::search::search_node::CardState::Buried => StateKind::Buried,
}
}
}
impl pb::search::search_node::IdList {
fn into_id_string(self) -> String {
self.ids
fn id_list_to_string(list: IdList) -> String {
list.ids
.iter()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(",")
}
}

View file

@ -1,32 +1,40 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::stats::stats_service::Service as StatsService;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::stats::stats_service::Service as StatsService;
use crate::prelude::*;
use crate::revlog::RevlogReviewKind;
impl StatsService for Backend {
fn card_stats(&self, input: pb::cards::CardId) -> Result<pb::stats::CardStatsResponse> {
self.with_col(|col| col.card_stats(input.into()))
type Error = AnkiError;
fn card_stats(
&self,
input: anki_proto::cards::CardId,
) -> Result<anki_proto::stats::CardStatsResponse> {
self.with_col(|col| col.card_stats(input.cid.into()))
}
fn graphs(&self, input: pb::stats::GraphsRequest) -> Result<pb::stats::GraphsResponse> {
fn graphs(
&self,
input: anki_proto::stats::GraphsRequest,
) -> Result<anki_proto::stats::GraphsResponse> {
self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
}
fn get_graph_preferences(
&self,
_input: pb::generic::Empty,
) -> Result<pb::stats::GraphPreferences> {
_input: anki_proto::generic::Empty,
) -> Result<anki_proto::stats::GraphPreferences> {
self.with_col(|col| Ok(col.get_graph_preferences()))
}
fn set_graph_preferences(
&self,
input: pb::stats::GraphPreferences,
) -> Result<pb::generic::Empty> {
input: anki_proto::stats::GraphPreferences,
) -> Result<anki_proto::generic::Empty> {
self.with_col(|col| col.set_graph_preferences(input))
.map(Into::into)
}
@ -35,11 +43,11 @@ impl StatsService for Backend {
impl From<RevlogReviewKind> for i32 {
fn from(kind: RevlogReviewKind) -> Self {
(match kind {
RevlogReviewKind::Learning => pb::stats::revlog_entry::ReviewKind::Learning,
RevlogReviewKind::Review => pb::stats::revlog_entry::ReviewKind::Review,
RevlogReviewKind::Relearning => pb::stats::revlog_entry::ReviewKind::Relearning,
RevlogReviewKind::Filtered => pb::stats::revlog_entry::ReviewKind::Filtered,
RevlogReviewKind::Manual => pb::stats::revlog_entry::ReviewKind::Manual,
RevlogReviewKind::Learning => anki_proto::stats::revlog_entry::ReviewKind::Learning,
RevlogReviewKind::Review => anki_proto::stats::revlog_entry::ReviewKind::Review,
RevlogReviewKind::Relearning => anki_proto::stats::revlog_entry::ReviewKind::Relearning,
RevlogReviewKind::Filtered => anki_proto::stats::revlog_entry::ReviewKind::Filtered,
RevlogReviewKind::Manual => anki_proto::stats::revlog_entry::ReviewKind::Manual,
}) as i32
}
}

View file

@ -3,18 +3,18 @@
use std::sync::Arc;
use anki_proto::generic;
pub(super) use anki_proto::sync::sync_service::Service as SyncService;
use anki_proto::sync::sync_status_response::Required;
use anki_proto::sync::SyncStatusResponse;
use futures::future::AbortHandle;
use futures::future::AbortRegistration;
use futures::future::Abortable;
use pb::sync::sync_status_response::Required;
use reqwest::Url;
use tracing::warn;
use super::progress::AbortHandleSlot;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::sync::sync_service::Service as SyncService;
use crate::pb::sync::SyncStatusResponse;
use crate::prelude::*;
use crate::sync::collection::normal::ClientSyncState;
use crate::sync::collection::normal::NormalSyncProgress;
@ -46,40 +46,42 @@ impl RemoteSyncStatus {
}
}
impl From<SyncOutput> for pb::sync::SyncCollectionResponse {
impl From<SyncOutput> for anki_proto::sync::SyncCollectionResponse {
fn from(o: SyncOutput) -> Self {
pb::sync::SyncCollectionResponse {
anki_proto::sync::SyncCollectionResponse {
host_number: o.host_number,
server_message: o.server_message,
new_endpoint: o.new_endpoint,
required: match o.required {
SyncActionRequired::NoChanges => {
pb::sync::sync_collection_response::ChangesRequired::NoChanges as i32
anki_proto::sync::sync_collection_response::ChangesRequired::NoChanges as i32
}
SyncActionRequired::FullSyncRequired {
upload_ok,
download_ok,
} => {
if !upload_ok {
pb::sync::sync_collection_response::ChangesRequired::FullDownload as i32
anki_proto::sync::sync_collection_response::ChangesRequired::FullDownload
as i32
} else if !download_ok {
pb::sync::sync_collection_response::ChangesRequired::FullUpload as i32
anki_proto::sync::sync_collection_response::ChangesRequired::FullUpload
as i32
} else {
pb::sync::sync_collection_response::ChangesRequired::FullSync as i32
anki_proto::sync::sync_collection_response::ChangesRequired::FullSync as i32
}
}
SyncActionRequired::NormalSyncRequired => {
pb::sync::sync_collection_response::ChangesRequired::NormalSync as i32
anki_proto::sync::sync_collection_response::ChangesRequired::NormalSync as i32
}
},
}
}
}
impl TryFrom<pb::sync::SyncAuth> for SyncAuth {
impl TryFrom<anki_proto::sync::SyncAuth> for SyncAuth {
type Error = AnkiError;
fn try_from(value: pb::sync::SyncAuth) -> std::result::Result<Self, Self::Error> {
fn try_from(value: anki_proto::sync::SyncAuth) -> std::result::Result<Self, Self::Error> {
Ok(SyncAuth {
hkey: value.hkey,
endpoint: value
@ -100,11 +102,13 @@ impl TryFrom<pb::sync::SyncAuth> for SyncAuth {
}
impl SyncService for Backend {
fn sync_media(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
type Error = AnkiError;
fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
self.sync_media_inner(input).map(Into::into)
}
fn abort_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn abort_sync(&self, _input: generic::Empty) -> Result<generic::Empty> {
if let Some(handle) = self.sync_abort.lock().unwrap().take() {
handle.abort();
}
@ -112,7 +116,7 @@ impl SyncService for Backend {
}
/// Abort the media sync. Does not wait for completion.
fn abort_media_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
fn abort_media_sync(&self, _input: generic::Empty) -> Result<generic::Empty> {
let guard = self.state.lock().unwrap();
if let Some(handle) = &guard.sync.media_sync_abort {
handle.abort();
@ -120,27 +124,33 @@ impl SyncService for Backend {
Ok(().into())
}
fn sync_login(&self, input: pb::sync::SyncLoginRequest) -> Result<pb::sync::SyncAuth> {
fn sync_login(
&self,
input: anki_proto::sync::SyncLoginRequest,
) -> Result<anki_proto::sync::SyncAuth> {
self.sync_login_inner(input)
}
fn sync_status(&self, input: pb::sync::SyncAuth) -> Result<pb::sync::SyncStatusResponse> {
fn sync_status(
&self,
input: anki_proto::sync::SyncAuth,
) -> Result<anki_proto::sync::SyncStatusResponse> {
self.sync_status_inner(input)
}
fn sync_collection(
&self,
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncCollectionResponse> {
input: anki_proto::sync::SyncAuth,
) -> Result<anki_proto::sync::SyncCollectionResponse> {
self.sync_collection_inner(input)
}
fn full_upload(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
self.full_sync_inner(input, true)?;
Ok(().into())
}
fn full_download(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
self.full_sync_inner(input, false)?;
Ok(().into())
}
@ -173,7 +183,7 @@ impl Backend {
Ok((guard, abort_reg))
}
pub(super) fn sync_media_inner(&self, auth: pb::sync::SyncAuth) -> Result<()> {
pub(super) fn sync_media_inner(&self, auth: anki_proto::sync::SyncAuth) -> Result<()> {
let auth = auth.try_into()?;
// mark media sync as active
let (abort_handle, abort_reg) = AbortHandle::new_pair();
@ -228,8 +238,8 @@ impl Backend {
pub(super) fn sync_login_inner(
&self,
input: pb::sync::SyncLoginRequest,
) -> Result<pb::sync::SyncAuth> {
input: anki_proto::sync::SyncLoginRequest,
) -> Result<anki_proto::sync::SyncAuth> {
let (_guard, abort_reg) = self.sync_abort_handle()?;
let rt = self.runtime_handle();
@ -239,7 +249,7 @@ impl Backend {
Ok(sync_result) => sync_result,
Err(_) => Err(AnkiError::Interrupted),
};
ret.map(|a| pb::sync::SyncAuth {
ret.map(|a| anki_proto::sync::SyncAuth {
hkey: a.hkey,
endpoint: None,
io_timeout_secs: None,
@ -248,19 +258,21 @@ impl Backend {
pub(super) fn sync_status_inner(
&self,
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncStatusResponse> {
input: anki_proto::sync::SyncAuth,
) -> Result<anki_proto::sync::SyncStatusResponse> {
// any local changes mean we can skip the network round-trip
let req = self.with_col(|col| col.sync_status_offline())?;
if req != Required::NoChanges {
return Ok(req.into());
return Ok(status_response_from_required(req));
}
// return cached server response if only a short time has elapsed
{
let guard = self.state.lock().unwrap();
if guard.sync.remote_sync_status.last_check.elapsed_secs() < 300 {
return Ok(guard.sync.remote_sync_status.last_response.into());
return Ok(status_response_from_required(
guard.sync.remote_sync_status.last_response,
));
}
}
@ -288,8 +300,8 @@ impl Backend {
pub(super) fn sync_collection_inner(
&self,
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncCollectionResponse> {
input: anki_proto::sync::SyncAuth,
) -> Result<anki_proto::sync::SyncCollectionResponse> {
let auth: SyncAuth = input.try_into()?;
let (_guard, abort_reg) = self.sync_abort_handle()?;
@ -329,7 +341,11 @@ impl Backend {
Ok(output.into())
}
pub(super) fn full_sync_inner(&self, input: pb::sync::SyncAuth, upload: bool) -> Result<()> {
pub(super) fn full_sync_inner(
&self,
input: anki_proto::sync::SyncAuth,
upload: bool,
) -> Result<()> {
let auth = input.try_into()?;
self.abort_media_sync_and_wait();
@ -381,13 +397,11 @@ impl Backend {
}
}
impl From<Required> for SyncStatusResponse {
fn from(r: Required) -> Self {
fn status_response_from_required(required: Required) -> SyncStatusResponse {
SyncStatusResponse {
required: r.into(),
required: required.into(),
new_endpoint: None,
}
}
}
impl From<ClientSyncState> for SyncStatusResponse {

View file

@ -1,22 +1,25 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::tags::tags_service::Service as TagsService;
use super::notes::to_note_ids;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::tags::tags_service::Service as TagsService;
use crate::prelude::*;
impl TagsService for Backend {
type Error = AnkiError;
fn clear_unused_tags(
&self,
_input: pb::generic::Empty,
) -> Result<pb::collection::OpChangesWithCount> {
_input: generic::Empty,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.clear_unused_tags().map(Into::into))
}
fn all_tags(&self, _input: pb::generic::Empty) -> Result<pb::generic::StringList> {
Ok(pb::generic::StringList {
fn all_tags(&self, _input: generic::Empty) -> Result<generic::StringList> {
Ok(generic::StringList {
vals: self.with_col(|col| {
Ok(col
.storage
@ -28,28 +31,31 @@ impl TagsService for Backend {
})
}
fn remove_tags(&self, tags: pb::generic::String) -> Result<pb::collection::OpChangesWithCount> {
fn remove_tags(
&self,
tags: generic::String,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into))
}
fn set_tag_collapsed(
&self,
input: pb::tags::SetTagCollapsedRequest,
) -> Result<pb::collection::OpChanges> {
input: anki_proto::tags::SetTagCollapsedRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.set_tag_collapsed(&input.name, input.collapsed)
.map(Into::into)
})
}
fn tag_tree(&self, _input: pb::generic::Empty) -> Result<pb::tags::TagTreeNode> {
fn tag_tree(&self, _input: generic::Empty) -> Result<anki_proto::tags::TagTreeNode> {
self.with_col(|col| col.tag_tree())
}
fn reparent_tags(
&self,
input: pb::tags::ReparentTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::tags::ReparentTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let source_tags = input.tags;
let target_tag = if input.new_parent.is_empty() {
None
@ -62,16 +68,16 @@ impl TagsService for Backend {
fn rename_tags(
&self,
input: pb::tags::RenameTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::tags::RenameTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix))
.map(Into::into)
}
fn add_note_tags(
&self,
input: pb::tags::NoteIdsAndTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::tags::NoteIdsAndTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
@ -80,8 +86,8 @@ impl TagsService for Backend {
fn remove_note_tags(
&self,
input: pb::tags::NoteIdsAndTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::tags::NoteIdsAndTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
@ -90,8 +96,8 @@ impl TagsService for Backend {
fn find_and_replace_tag(
&self,
input: pb::tags::FindAndReplaceTagRequest,
) -> Result<pb::collection::OpChangesWithCount> {
input: anki_proto::tags::FindAndReplaceTagRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
let note_ids = if input.note_ids.is_empty() {
col.search_notes_unordered("")?
@ -111,11 +117,11 @@ impl TagsService for Backend {
fn complete_tag(
&self,
input: pb::tags::CompleteTagRequest,
) -> Result<pb::tags::CompleteTagResponse> {
input: anki_proto::tags::CompleteTagRequest,
) -> Result<anki_proto::tags::CompleteTagResponse> {
self.with_col(|col| {
let tags = col.complete_tag(&input.input, input.match_limit as usize)?;
Ok(pb::tags::CompleteTagResponse { tags })
Ok(anki_proto::tags::CompleteTagResponse { tags })
})
}
}

View file

@ -14,7 +14,6 @@ use crate::card::CardType;
use crate::card_rendering::prettify_av_tags;
use crate::notetype::CardTemplate;
use crate::notetype::NotetypeKind;
use crate::pb;
use crate::prelude::*;
use crate::scheduler::timespan::time_span;
use crate::scheduler::timing::SchedTimingToday;
@ -182,8 +181,8 @@ impl Column {
.into()
}
pub fn default_order(self) -> pb::search::browser_columns::Sorting {
use pb::search::browser_columns::Sorting;
pub fn default_order(self) -> anki_proto::search::browser_columns::Sorting {
use anki_proto::search::browser_columns::Sorting;
match self {
Column::Question | Column::Answer | Column::Custom => Sorting::None,
Column::SortField | Column::Tags | Column::Notetype | Column::Deck => {
@ -205,8 +204,8 @@ impl Column {
matches!(self, Self::Question | Self::Answer | Self::SortField)
}
pub fn alignment(self) -> pb::search::browser_columns::Alignment {
use pb::search::browser_columns::Alignment;
pub fn alignment(self) -> anki_proto::search::browser_columns::Alignment {
use anki_proto::search::browser_columns::Alignment;
match self {
Self::Question
| Self::Answer
@ -221,16 +220,16 @@ impl Column {
}
impl Collection {
pub fn all_browser_columns(&self) -> pb::search::BrowserColumns {
let mut columns: Vec<pb::search::browser_columns::Column> = Column::iter()
pub fn all_browser_columns(&self) -> anki_proto::search::BrowserColumns {
let mut columns: Vec<anki_proto::search::browser_columns::Column> = Column::iter()
.filter(|&c| c != Column::Custom)
.map(|c| c.to_pb_column(&self.tr))
.collect();
columns.sort_by(|c1, c2| c1.cards_mode_label.cmp(&c2.cards_mode_label));
pb::search::BrowserColumns { columns }
anki_proto::search::BrowserColumns { columns }
}
pub fn browser_row_for_id(&mut self, id: i64) -> Result<pb::search::BrowserRow> {
pub fn browser_row_for_id(&mut self, id: i64) -> Result<anki_proto::search::BrowserRow> {
let notes_mode = self.get_config_bool(BoolKey::BrowserTableShowNotesMode);
let columns = Arc::clone(
self.state
@ -361,8 +360,8 @@ impl RowContext {
})
}
fn browser_row(&self, columns: &[Column]) -> Result<pb::search::BrowserRow> {
Ok(pb::search::BrowserRow {
fn browser_row(&self, columns: &[Column]) -> Result<anki_proto::search::BrowserRow> {
Ok(anki_proto::search::BrowserRow {
cells: columns
.iter()
.map(|&column| self.get_cell(column))
@ -373,8 +372,8 @@ impl RowContext {
})
}
fn get_cell(&self, column: Column) -> Result<pb::search::browser_row::Cell> {
Ok(pb::search::browser_row::Cell {
fn get_cell(&self, column: Column) -> Result<anki_proto::search::browser_row::Cell> {
Ok(anki_proto::search::browser_row::Cell {
text: self.get_cell_text(column)?,
is_rtl: self.get_is_rtl(column),
})
@ -551,8 +550,8 @@ impl RowContext {
Ok(self.template()?.config.browser_font_size)
}
fn get_row_color(&self) -> pb::search::browser_row::Color {
use pb::search::browser_row::Color;
fn get_row_color(&self) -> anki_proto::search::browser_row::Color {
use anki_proto::search::browser_row::Color;
if self.notes_mode {
if self.note.is_marked() {
Color::Marked

View file

@ -3,7 +3,6 @@
use std::collections::HashMap;
use crate::pb;
use crate::prelude::*;
mod parser;
@ -20,7 +19,7 @@ pub fn extract_av_tags<S: Into<String> + AsRef<str>>(
txt: S,
question_side: bool,
tr: &I18n,
) -> (String, Vec<pb::card_rendering::AvTag>) {
) -> (String, Vec<anki_proto::card_rendering::AvTag>) {
nodes_or_text_only(txt.as_ref())
.map(|nodes| nodes.write_and_extract_av_tags(question_side, tr))
.unwrap_or_else(|| (txt.into(), vec![]))
@ -125,14 +124,14 @@ mod test {
(
"foo [anki:play:q:0] baz [anki:play:q:1]",
vec![
pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
anki_proto::card_rendering::AvTag {
value: Some(anki_proto::card_rendering::av_tag::Value::SoundOrVideo(
"bar.mp3".to_string()
))
},
pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::Tts(
pb::card_rendering::TtsTag {
anki_proto::card_rendering::AvTag {
value: Some(anki_proto::card_rendering::av_tag::Value::Tts(
anki_proto::card_rendering::TtsTag {
field_text: tr.card_templates_blank().to_string(),
lang: "en_US".to_string(),
voices: vec![],

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb::card_rendering::all_tts_voices_response::TtsVoice;
use anki_proto::card_rendering::all_tts_voices_response::TtsVoice;
use crate::prelude::*;
#[cfg(windows)]

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::pb::card_rendering::all_tts_voices_response::TtsVoice;
use anki_proto::card_rendering::all_tts_voices_response::TtsVoice;
use crate::prelude::*;
pub(super) fn all_voices(_validate: bool) -> Result<Vec<TtsVoice>> {

View file

@ -4,6 +4,7 @@
use std::fs::File;
use std::io::Write;
use anki_proto::card_rendering::all_tts_voices_response::TtsVoice;
use futures::executor::block_on;
use windows::core::HSTRING;
use windows::Media::SpeechSynthesis::SpeechSynthesisStream;
@ -13,7 +14,6 @@ use windows::Storage::Streams::DataReader;
use crate::error::windows::WindowsErrorDetails;
use crate::error::windows::WindowsSnafu;
use crate::pb::card_rendering::all_tts_voices_response::TtsVoice;
use crate::prelude::*;
const MAX_BUFFER_SIZE: usize = 128 * 1024;

View file

@ -8,7 +8,6 @@ use super::Directive;
use super::Node;
use super::OtherDirective;
use super::TtsDirective;
use crate::pb;
use crate::prelude::*;
use crate::text::decode_entities;
use crate::text::strip_html_for_tts;
@ -22,7 +21,7 @@ impl<'a> CardNodes<'a> {
&self,
question_side: bool,
tr: &I18n,
) -> (String, Vec<pb::card_rendering::AvTag>) {
) -> (String, Vec<anki_proto::card_rendering::AvTag>) {
let mut extractor = AvExtractor::new(question_side, tr);
(extractor.write(self), extractor.tags)
}
@ -122,7 +121,7 @@ impl Write for AvStripper {
struct AvExtractor<'a> {
side: char,
tags: Vec<pb::card_rendering::AvTag>,
tags: Vec<anki_proto::card_rendering::AvTag>,
tr: &'a I18n,
}
@ -150,8 +149,8 @@ impl<'a> AvExtractor<'a> {
impl Write for AvExtractor<'_> {
fn write_sound(&mut self, buf: &mut String, resource: &str) {
self.write_play_tag(buf);
self.tags.push(pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
self.tags.push(anki_proto::card_rendering::AvTag {
value: Some(anki_proto::card_rendering::av_tag::Value::SoundOrVideo(
decode_entities(resource).into(),
)),
});
@ -164,9 +163,9 @@ impl Write for AvExtractor<'_> {
}
self.write_play_tag(buf);
self.tags.push(pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::Tts(
pb::card_rendering::TtsTag {
self.tags.push(anki_proto::card_rendering::AvTag {
value: Some(anki_proto::card_rendering::av_tag::Value::Tts(
anki_proto::card_rendering::TtsTag {
field_text: self.transform_tts_content(directive),
lang: directive.lang.into(),
voices: directive.voices.iter().map(ToString::to_string).collect(),

View file

@ -11,13 +11,13 @@ use std::thread;
use std::thread::JoinHandle;
use std::time::SystemTime;
use anki_proto::config::preferences::BackupLimits;
use chrono::prelude::*;
use itertools::Itertools;
use tracing::error;
use crate::import_export::package::export_colpkg_from_data;
use crate::io::read_locked_db_file;
use crate::pb::config::preferences::BackupLimits;
use crate::prelude::*;
const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg";

View file

@ -12,11 +12,12 @@ use std::fmt::Formatter;
use std::path::PathBuf;
use std::sync::Arc;
use anki_i18n::I18n;
use crate::browser_table;
use crate::decks::Deck;
use crate::decks::DeckId;
use crate::error::Result;
use crate::i18n::I18n;
use crate::io::create_dir_all;
use crate::notetype::Notetype;
use crate::notetype::NotetypeId;

View file

@ -1,8 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use serde::Deserialize;
use serde_aux::field_attributes::deserialize_bool_from_anything;
use serde_derive::Deserialize;
use strum::IntoStaticStr;
use crate::prelude::*;

View file

@ -9,6 +9,7 @@ pub(crate) mod schema11;
mod string;
pub(crate) mod undo;
use anki_proto::config::preferences::BackupLimits;
use serde::de::DeserializeOwned;
use serde::Serialize;
use serde_repr::Deserialize_repr;
@ -20,7 +21,6 @@ pub use self::deck::DeckConfigKey;
pub use self::notetype::get_aux_notetype_config_key;
pub use self::number::I32ConfigKey;
pub use self::string::StringKey;
use crate::pb::config::preferences::BackupLimits;
use crate::prelude::*;
/// Only used when updating/undoing.

View file

@ -4,6 +4,7 @@
use std::collections::HashSet;
use std::sync::Arc;
use anki_i18n::I18n;
use itertools::Itertools;
use tracing::debug;
@ -13,7 +14,6 @@ use crate::error::AnkiError;
use crate::error::DbError;
use crate::error::DbErrorKind;
use crate::error::Result;
use crate::i18n::I18n;
use crate::notetype::all_stock_notetypes;
use crate::notetype::AlreadyGeneratedCardInfo;
use crate::notetype::CardGenContext;

View file

@ -5,18 +5,17 @@ mod schema11;
pub(crate) mod undo;
mod update;
pub use anki_proto::deckconfig::deck_config::config::LeechAction;
pub use anki_proto::deckconfig::deck_config::config::NewCardGatherPriority;
pub use anki_proto::deckconfig::deck_config::config::NewCardInsertOrder;
pub use anki_proto::deckconfig::deck_config::config::NewCardSortOrder;
pub use anki_proto::deckconfig::deck_config::config::ReviewCardOrder;
pub use anki_proto::deckconfig::deck_config::config::ReviewMix;
pub use anki_proto::deckconfig::deck_config::Config as DeckConfigInner;
pub use schema11::DeckConfSchema11;
pub use schema11::NewCardOrderSchema11;
pub use update::UpdateDeckConfigsRequest;
pub use crate::pb::deckconfig::deck_config::config::LeechAction;
pub use crate::pb::deckconfig::deck_config::config::NewCardGatherPriority;
pub use crate::pb::deckconfig::deck_config::config::NewCardInsertOrder;
pub use crate::pb::deckconfig::deck_config::config::NewCardSortOrder;
pub use crate::pb::deckconfig::deck_config::config::ReviewCardOrder;
pub use crate::pb::deckconfig::deck_config::config::ReviewMix;
pub use crate::pb::deckconfig::deck_config::Config as DeckConfigInner;
/// Old deck config and cards table store 250% as 2500.
pub(crate) const INITIAL_EASE_FACTOR_THOUSANDS: u16 = (INITIAL_EASE_FACTOR * 1000.0) as u16;
@ -186,67 +185,85 @@ impl Collection {
}
}
impl DeckConfigInner {
/// There was a period of time when the deck options screen was allowing
/// 0/NaN to be persisted, so we need to check the values are within
/// valid bounds when reading from the DB.
pub(crate) fn ensure_values_valid(&mut self) {
/// There was a period of time when the deck options screen was allowing
/// 0/NaN to be persisted, so we need to check the values are within
/// valid bounds when reading from the DB.
pub(crate) fn ensure_deck_config_values_valid(config: &mut DeckConfigInner) {
let default = DEFAULT_DECK_CONFIG_INNER;
ensure_u32_valid(&mut self.new_per_day, default.new_per_day, 0, 9999);
ensure_u32_valid(&mut self.reviews_per_day, default.reviews_per_day, 0, 9999);
ensure_u32_valid(&mut config.new_per_day, default.new_per_day, 0, 9999);
ensure_u32_valid(
&mut self.new_per_day_minimum,
&mut config.reviews_per_day,
default.reviews_per_day,
0,
9999,
);
ensure_u32_valid(
&mut config.new_per_day_minimum,
default.new_per_day_minimum,
0,
9999,
);
ensure_f32_valid(&mut self.initial_ease, default.initial_ease, 1.31, 5.0);
ensure_f32_valid(&mut self.easy_multiplier, default.easy_multiplier, 1.0, 5.0);
ensure_f32_valid(&mut self.hard_multiplier, default.hard_multiplier, 0.5, 1.3);
ensure_f32_valid(&mut config.initial_ease, default.initial_ease, 1.31, 5.0);
ensure_f32_valid(
&mut self.lapse_multiplier,
&mut config.easy_multiplier,
default.easy_multiplier,
1.0,
5.0,
);
ensure_f32_valid(
&mut config.hard_multiplier,
default.hard_multiplier,
0.5,
1.3,
);
ensure_f32_valid(
&mut config.lapse_multiplier,
default.lapse_multiplier,
0.0,
1.0,
);
ensure_f32_valid(
&mut self.interval_multiplier,
&mut config.interval_multiplier,
default.interval_multiplier,
0.5,
2.0,
);
ensure_u32_valid(
&mut self.maximum_review_interval,
&mut config.maximum_review_interval,
default.maximum_review_interval,
1,
36_500,
);
ensure_u32_valid(
&mut self.minimum_lapse_interval,
&mut config.minimum_lapse_interval,
default.minimum_lapse_interval,
1,
36_500,
);
ensure_u32_valid(
&mut self.graduating_interval_good,
&mut config.graduating_interval_good,
default.graduating_interval_good,
1,
36_500,
);
ensure_u32_valid(
&mut self.graduating_interval_easy,
&mut config.graduating_interval_easy,
default.graduating_interval_easy,
1,
36_500,
);
ensure_u32_valid(&mut self.leech_threshold, default.leech_threshold, 1, 9999);
ensure_u32_valid(
&mut self.cap_answer_time_to_secs,
&mut config.leech_threshold,
default.leech_threshold,
1,
9999,
);
ensure_u32_valid(
&mut config.cap_answer_time_to_secs,
default.cap_answer_time_to_secs,
1,
9999,
);
}
}
fn ensure_f32_valid(val: &mut f32, default: f32, min: f32, max: f32) {

View file

@ -4,10 +4,10 @@
use std::collections::HashMap;
use serde::Deserialize as DeTrait;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde_aux::field_attributes::deserialize_number_from_string;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use serde_repr::Deserialize_repr;
use serde_repr::Serialize_repr;

View file

@ -7,13 +7,13 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::iter;
use anki_proto::deckconfig::deck_configs_for_update::current_deck::Limits;
use anki_proto::deckconfig::deck_configs_for_update::ConfigWithExtra;
use anki_proto::deckconfig::deck_configs_for_update::CurrentDeck;
use anki_proto::decks::deck::normal::DayLimit;
use crate::config::StringKey;
use crate::decks::NormalDeck;
use crate::pb;
use crate::pb::deckconfig::deck_configs_for_update::current_deck::Limits;
use crate::pb::deckconfig::deck_configs_for_update::ConfigWithExtra;
use crate::pb::deckconfig::deck_configs_for_update::CurrentDeck;
use crate::pb::decks::deck::normal::DayLimit;
use crate::prelude::*;
use crate::search::JoinSearches;
use crate::search::SearchNode;
@ -35,8 +35,8 @@ impl Collection {
pub fn get_deck_configs_for_update(
&mut self,
deck: DeckId,
) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
Ok(pb::deckconfig::DeckConfigsForUpdate {
) -> Result<anki_proto::deckconfig::DeckConfigsForUpdate> {
Ok(anki_proto::deckconfig::DeckConfigsForUpdate {
all_config: self.get_deck_config_with_extra_for_update()?,
current_deck: Some(self.get_current_deck_for_update(deck)?),
defaults: Some(DeckConfig::default().into()),
@ -99,7 +99,7 @@ impl Collection {
.into_iter()
.map(Into::into)
.collect(),
limits: Some(normal.to_limits(today)),
limits: Some(normal_deck_to_limits(normal, today)),
})
}
@ -172,7 +172,7 @@ impl Collection {
{
let mut updated = deck.clone();
updated.normal_mut()?.config_id = selected_config.id.0;
updated.normal_mut()?.update_limits(&input.limits, today);
update_deck_limits(updated.normal_mut()?, &input.limits, today);
self.update_deck_inner(&mut updated, deck, usn)?;
selected_config.id
} else {
@ -236,30 +236,28 @@ impl Collection {
}
}
impl NormalDeck {
fn to_limits(&self, today: u32) -> Limits {
fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits {
Limits {
review: self.review_limit,
new: self.new_limit,
review_today: self.review_limit_today.map(|limit| limit.limit),
new_today: self.new_limit_today.map(|limit| limit.limit),
review_today_active: self
review: deck.review_limit,
new: deck.new_limit,
review_today: deck.review_limit_today.map(|limit| limit.limit),
new_today: deck.new_limit_today.map(|limit| limit.limit),
review_today_active: deck
.review_limit_today
.map(|limit| limit.today == today)
.unwrap_or_default(),
new_today_active: self
new_today_active: deck
.new_limit_today
.map(|limit| limit.today == today)
.unwrap_or_default(),
}
}
}
fn update_limits(&mut self, limits: &Limits, today: u32) {
self.review_limit = limits.review;
self.new_limit = limits.new;
update_day_limit(&mut self.review_limit_today, limits.review_today, today);
update_day_limit(&mut self.new_limit_today, limits.new_today, today);
}
/// Overwrite `deck`'s regular and per-day review/new limits with the
/// values from `limits`, stamping the per-day limits with `today`.
fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) {
    update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
    update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
    deck.review_limit = limits.review;
    deck.new_limit = limits.new;
}
fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {

View file

@ -2,7 +2,6 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use crate::pb;
use crate::prelude::*;
#[derive(Debug)]
@ -43,11 +42,11 @@ impl Collection {
pub(crate) fn counts_for_deck_today(
&mut self,
did: DeckId,
) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
) -> Result<anki_proto::scheduler::CountsForDeckTodayResponse> {
let today = self.current_due_day(0)?;
let mut deck = self.storage.get_deck(did)?.or_not_found(did)?;
deck.reset_stats_if_day_changed(today);
Ok(pb::scheduler::CountsForDeckTodayResponse {
Ok(anki_proto::scheduler::CountsForDeckTodayResponse {
new: deck.common.new_studied,
review: deck.common.review_studied,
})

View file

@ -43,13 +43,14 @@ impl Deck {
}
}
impl FilteredSearchOrder {
pub fn labels(tr: &I18n) -> Vec<String> {
FilteredSearchOrder::iter().map(|v| v.label(tr)).collect()
}
/// Translated labels for every filtered-deck search order, in enum order.
pub fn search_order_labels(tr: &I18n) -> Vec<String> {
    let mut labels = Vec::new();
    for order in FilteredSearchOrder::iter() {
        labels.push(search_order_label(order, tr));
    }
    labels
}
fn label(self, tr: &I18n) -> String {
match self {
fn search_order_label(order: FilteredSearchOrder, tr: &I18n) -> String {
match order {
FilteredSearchOrder::OldestReviewedFirst => tr.decks_oldest_seen_first(),
FilteredSearchOrder::Random => tr.decks_random(),
FilteredSearchOrder::IntervalsAscending => tr.decks_increasing_intervals(),
@ -61,5 +62,4 @@ impl FilteredSearchOrder {
FilteredSearchOrder::DuePriority => tr.decks_relative_overdueness(),
}
.into()
}
}

View file

@ -4,6 +4,7 @@
use std::collections::HashMap;
use std::iter::Peekable;
use anki_proto::decks::deck::normal::DayLimit;
use id_tree::InsertBehavior;
use id_tree::Node;
use id_tree::NodeId;
@ -13,7 +14,6 @@ use super::Deck;
use super::NormalDeck;
use crate::deckconfig::DeckConfig;
use crate::deckconfig::DeckConfigId;
use crate::pb::decks::deck::normal::DayLimit;
use crate::prelude::*;
#[derive(Debug, Clone, Copy)]
@ -22,36 +22,32 @@ pub(crate) enum LimitKind {
New,
}
impl NormalDeck {
/// The deck's review limit for today, or its regular one, if any is
/// configured.
pub fn current_review_limit(&self, today: u32) -> Option<u32> {
self.review_limit_today(today).or(self.review_limit)
}
/// The deck's new limit for today, or its regular one, if any is
/// configured.
pub fn current_new_limit(&self, today: u32) -> Option<u32> {
self.new_limit_today(today).or(self.new_limit)
}
/// The deck's review limit for today.
pub fn review_limit_today(&self, today: u32) -> Option<u32> {
self.review_limit_today
.and_then(|day_limit| day_limit.limit(today))
}
/// The deck's new limit for today.
pub fn new_limit_today(&self, today: u32) -> Option<u32> {
self.new_limit_today
.and_then(|day_limit| day_limit.limit(today))
}
/// The deck's review limit for today, falling back to its regular
/// review limit, if either is configured.
pub fn current_review_limit(deck: &NormalDeck, today: u32) -> Option<u32> {
    match review_limit_today(deck, today) {
        Some(today_limit) => Some(today_limit),
        None => deck.review_limit,
    }
}
impl DayLimit {
pub fn limit(&self, today: u32) -> Option<u32> {
(self.today == today).then_some(self.limit)
}
/// The deck's new-card limit for today, falling back to its regular
/// new limit, if either is configured.
pub fn current_new_limit(deck: &NormalDeck, today: u32) -> Option<u32> {
    match new_limit_today(deck, today) {
        Some(today_limit) => Some(today_limit),
        None => deck.new_limit,
    }
}
/// The deck's review limit for today, if one was set for `today`.
pub fn review_limit_today(deck: &NormalDeck, today: u32) -> Option<u32> {
    let day_limit = deck.review_limit_today?;
    limit_if_today(day_limit, today)
}
/// The deck's new-card limit for today, if one was set for `today`.
pub fn new_limit_today(deck: &NormalDeck, today: u32) -> Option<u32> {
    let day_limit = deck.new_limit_today?;
    limit_if_today(day_limit, today)
}
/// `limit.limit` if the day limit was set on `today`; `None` otherwise.
pub fn limit_if_today(limit: DayLimit, today: u32) -> Option<u32> {
    if limit.today == today {
        Some(limit.limit)
    } else {
        None
    }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -118,12 +114,10 @@ impl RemainingLimits {
normal: &NormalDeck,
config: &DeckConfig,
) -> RemainingLimits {
let mut review_limit = normal
.current_review_limit(today)
.unwrap_or(config.inner.reviews_per_day) as i32;
let mut new_limit = normal
.current_new_limit(today)
.unwrap_or(config.inner.new_per_day) as i32;
let mut review_limit =
current_review_limit(normal, today).unwrap_or(config.inner.reviews_per_day) as i32;
let mut new_limit =
current_new_limit(normal, today).unwrap_or(config.inner.new_per_day) as i32;
let (new_today_count, review_today_count) = deck.new_rev_counts(today);
review_limit -= review_today_count;

View file

@ -4,18 +4,26 @@
mod addupdate;
mod counts;
mod current;
mod filtered;
pub mod filtered;
pub(crate) mod limits;
mod name;
mod remove;
mod reparent;
mod schema11;
mod stats;
mod tree;
pub mod tree;
pub(crate) mod undo;
use std::sync::Arc;
pub use anki_proto::decks::deck::filtered::search_term::Order as FilteredSearchOrder;
pub use anki_proto::decks::deck::filtered::SearchTerm as FilteredSearchTerm;
pub use anki_proto::decks::deck::kind_container::Kind as DeckKind;
pub use anki_proto::decks::deck::Common as DeckCommon;
pub use anki_proto::decks::deck::Filtered as FilteredDeck;
pub use anki_proto::decks::deck::KindContainer as DeckKindContainer;
pub use anki_proto::decks::deck::Normal as NormalDeck;
pub use anki_proto::decks::Deck as DeckProto;
pub(crate) use counts::DueCounts;
pub(crate) use name::immediate_parent_name;
pub use name::NativeDeckName;
@ -24,14 +32,6 @@ pub use schema11::DeckSchema11;
use crate::define_newtype;
use crate::error::FilteredDeckError;
use crate::markdown::render_markdown;
pub use crate::pb::decks::deck::filtered::search_term::Order as FilteredSearchOrder;
pub use crate::pb::decks::deck::filtered::SearchTerm as FilteredSearchTerm;
pub use crate::pb::decks::deck::kind_container::Kind as DeckKind;
pub use crate::pb::decks::deck::Common as DeckCommon;
pub use crate::pb::decks::deck::Filtered as FilteredDeck;
pub use crate::pb::decks::deck::KindContainer as DeckKindContainer;
pub use crate::pb::decks::deck::Normal as NormalDeck;
pub use crate::pb::decks::Deck as DeckProto;
use crate::prelude::*;
use crate::text::sanitize_html_no_images;

View file

@ -3,8 +3,9 @@
use std::collections::HashMap;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use anki_proto::decks::deck::normal::DayLimit;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use serde_tuple::Serialize_tuple;
@ -12,7 +13,6 @@ use super::DeckCommon;
use super::FilteredDeck;
use super::FilteredSearchTerm;
use super::NormalDeck;
use crate::pb::decks::deck::normal::DayLimit;
use crate::prelude::*;
use crate::serde::default_on_invalid;
use crate::serde::deserialize_bool_from_anything;

View file

@ -1,7 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::DeckCommon;
use crate::pb;
use crate::prelude::*;
impl Deck {
@ -24,7 +23,7 @@ impl Collection {
&mut self,
today: u32,
usn: Usn,
input: pb::scheduler::UpdateStatsRequest,
input: anki_proto::scheduler::UpdateStatsRequest,
) -> Result<()> {
let did = input.deck_id.into();
let mutator = |c: &mut DeckCommon| {

View file

@ -6,6 +6,8 @@ use std::collections::HashSet;
use std::iter::Peekable;
use std::ops::AddAssign;
pub use anki_proto::decks::set_deck_collapsed_request::Scope as DeckCollapseScope;
use anki_proto::decks::DeckTreeNode;
use serde_tuple::Serialize_tuple;
use unicase::UniCase;
@ -14,8 +16,6 @@ use super::limits::RemainingLimits;
use super::DueCounts;
use crate::config::SchedulerVersion;
use crate::ops::OpOutput;
pub use crate::pb::decks::set_deck_collapsed_request::Scope as DeckCollapseScope;
use crate::pb::decks::DeckTreeNode;
use crate::prelude::*;
use crate::undo::Op;
@ -250,28 +250,29 @@ fn hide_default_deck(node: &mut DeckTreeNode) {
}
}
impl DeckTreeNode {
/// Locate provided deck in tree, and return it.
pub fn get_deck(self, deck_id: DeckId) -> Option<DeckTreeNode> {
if self.deck_id == deck_id.0 {
return Some(self);
/// Locate provided deck in tree, and return it.
pub fn get_deck_in_tree(tree: DeckTreeNode, deck_id: DeckId) -> Option<DeckTreeNode> {
if tree.deck_id == deck_id.0 {
return Some(tree);
}
for child in self.children {
if let Some(node) = child.get_deck(deck_id) {
for child in tree.children {
if let Some(node) = get_deck_in_tree(child, deck_id) {
return Some(node);
}
}
None
}
}
pub(crate) fn sum<T: AddAssign>(&self, map: fn(&DeckTreeNode) -> T) -> T {
let mut output = map(self);
for child in &self.children {
output += child.sum(map);
/// Apply `map` to `node` and every descendant, summing all results.
pub(crate) fn sum_deck_tree_node<T: AddAssign>(
    node: &DeckTreeNode,
    map: fn(&DeckTreeNode) -> T,
) -> T {
    let mut total = map(node);
    for child in node.children.iter() {
        total += sum_deck_tree_node(child, map);
    }
    total
}
}
#[derive(Serialize_tuple)]
@ -355,7 +356,7 @@ impl Collection {
pub fn current_deck_tree(&mut self) -> Result<Option<DeckTreeNode>> {
let target = self.get_current_deck_id();
let tree = self.deck_tree(Some(TimestampSecs::now()))?;
Ok(tree.get_deck(target))
Ok(get_deck_in_tree(tree, target))
}
pub fn set_deck_collapsed(

View file

@ -11,6 +11,8 @@ mod search;
#[cfg(windows)]
pub mod windows;
use anki_i18n::I18n;
use anki_proto::ProtoError;
pub use db::DbError;
pub use db::DbErrorKind;
pub use filtered::CustomStudyError;
@ -30,7 +32,6 @@ pub use self::invalid_input::InvalidInputError;
pub use self::invalid_input::OrInvalid;
pub use self::not_found::NotFoundError;
pub use self::not_found::OrNotFound;
use crate::i18n::I18n;
use crate::import_export::ImportError;
use crate::links::HelpPage;
@ -300,3 +301,11 @@ pub enum CardTypeErrorDetails {
MissingCloze,
ExtraneousCloze,
}
impl From<anki_proto::ProtoError> for AnkiError {
    /// Surface a protobuf error as `AnkiError::ProtoError`, keeping only
    /// its rendered message text.
    fn from(err: ProtoError) -> Self {
        let info = err.to_string();
        AnkiError::ProtoError { info }
    }
}

View file

@ -1,4 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub use anki_i18n::I18n;

View file

@ -4,16 +4,16 @@
use std::path::Path;
use std::path::PathBuf;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageClozeNote;
use anki_proto::image_occlusion::get_image_occlusion_note_response::Value;
use anki_proto::image_occlusion::GetImageForOcclusionResponse;
use anki_proto::image_occlusion::GetImageOcclusionNoteResponse;
use regex::Regex;
use crate::io::metadata;
use crate::io::read_file;
use crate::media::MediaManager;
use crate::notetype::CardGenContext;
use crate::pb::image_occlusion::get_image_occlusion_note_response::ImageClozeNote;
use crate::pb::image_occlusion::get_image_occlusion_note_response::Value;
use crate::pb::image_occlusion::GetImageForOcclusionResponse;
use crate::pb::image_occlusion::GetImageOcclusionNoteResponse;
use crate::prelude::*;
impl Collection {

View file

@ -3,10 +3,11 @@
use std::sync::Arc;
use anki_proto::notetypes::stock_notetype::OriginalStockKind;
use crate::notetype::stock::empty_stock;
use crate::notetype::Notetype;
use crate::notetype::NotetypeKind;
use crate::pb::notetypes::stock_notetype::OriginalStockKind;
use crate::prelude::*;
impl Collection {

View file

@ -8,10 +8,10 @@ pub mod text;
use std::marker::PhantomData;
pub use anki_proto::import_export::import_response::Log as NoteLog;
pub use anki_proto::import_export::import_response::Note as LogNote;
use snafu::Snafu;
pub use crate::pb::import_export::import_response::Log as NoteLog;
pub use crate::pb::import_export::import_response::Note as LogNote;
use crate::prelude::*;
use crate::text::newlines_to_spaces;
use crate::text::strip_html_preserving_media_filenames;
@ -114,7 +114,7 @@ impl<'f, F: 'f + FnMut(usize) -> Result<()>> Incrementor<'f, F> {
impl Note {
pub(crate) fn into_log_note(self) -> LogNote {
LogNote {
id: Some(self.id.into()),
id: Some(anki_proto::notes::NoteId { nid: self.id.0 }),
fields: self
.into_fields()
.into_iter()

View file

@ -5,6 +5,7 @@ use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use super::super::meta::MetaExt;
use crate::collection::CollectionBuilder;
use crate::import_export::gather::ExchangeData;
use crate::import_export::package::colpkg::export::export_collection;

View file

@ -141,7 +141,7 @@ impl DeckContext<'_> {
fn update_deck(&mut self, deck: &Deck, original: Deck) -> Result<()> {
let mut new_deck = original.clone();
if let (Ok(new), Ok(old)) = (new_deck.normal_mut(), deck.normal()) {
new.update_with_other(old);
update_normal_with_other(new, old);
} else if let (Ok(new), Ok(old)) = (new_deck.filtered_mut(), deck.filtered()) {
*new = old.clone();
} else {
@ -188,20 +188,18 @@ impl Deck {
}
}
impl NormalDeck {
fn update_with_other(&mut self, other: &Self) {
fn update_normal_with_other(normal: &mut NormalDeck, other: &NormalDeck) {
if !other.description.is_empty() {
self.markdown_description = other.markdown_description;
self.description = other.description.clone();
normal.markdown_description = other.markdown_description;
normal.description = other.description.clone();
}
if other.config_id != 1 {
self.config_id = other.config_id;
}
self.review_limit = other.review_limit.or(self.review_limit);
self.new_limit = other.new_limit.or(self.new_limit);
self.review_limit_today = other.review_limit_today.or(self.review_limit_today);
self.new_limit_today = other.new_limit_today.or(self.new_limit_today);
normal.config_id = other.config_id;
}
normal.review_limit = other.review_limit.or(normal.review_limit);
normal.new_limit = other.new_limit.or(normal.new_limit);
normal.review_limit_today = other.review_limit_today.or(normal.review_limit_today);
normal.new_limit_today = other.new_limit_today.or(normal.new_limit_today);
}
#[cfg(test)]

View file

@ -7,6 +7,7 @@ use std::mem;
use zip::ZipArchive;
use super::super::super::meta::MetaExt;
use super::Context;
use crate::error::FileIoSnafu;
use crate::error::FileOp;

View file

@ -15,6 +15,7 @@ use rusqlite::OptionalExtension;
use tempfile::NamedTempFile;
use zip::ZipArchive;
use super::super::meta::MetaExt;
use crate::collection::CollectionBuilder;
use crate::error::FileIoSnafu;
use crate::error::FileOp;

View file

@ -18,11 +18,14 @@ use zstd::stream::raw::Encoder as RawEncoder;
use zstd::stream::zio;
use zstd::Encoder;
use super::super::meta::MetaExt;
use super::super::meta::VersionExt;
use super::super::MediaEntries;
use super::super::MediaEntry;
use super::super::Meta;
use super::super::Version;
use crate::collection::CollectionBuilder;
use crate::import_export::package::media::new_media_entry;
use crate::import_export::package::media::MediaCopier;
use crate::import_export::package::media::MediaIter;
use crate::import_export::ExportProgress;
@ -269,7 +272,7 @@ fn write_media_files(
zip.start_file(index.to_string(), file_options_stored())?;
let (size, sha1) = copier.copy(&mut entry.data, zip)?;
media_entries.push(MediaEntry::new(entry.nfc_filename, size, sha1));
media_entries.push(new_media_entry(entry.nfc_filename, size, sha1));
}
Ok(())

View file

@ -11,6 +11,7 @@ use zip::read::ZipFile;
use zip::ZipArchive;
use zstd::stream::copy_decode;
use super::super::meta::MetaExt;
use crate::collection::CollectionBuilder;
use crate::error::FileIoSnafu;
use crate::error::FileOp;

View file

@ -20,6 +20,7 @@ use zip::ZipArchive;
use zstd::stream::copy_decode;
use zstd::stream::raw::Encoder as RawEncoder;
use super::meta::MetaExt;
use super::MediaEntries;
use super::MediaEntry;
use super::Meta;
@ -44,19 +45,17 @@ pub(super) struct SafeMediaEntry {
pub(super) index: usize,
}
impl MediaEntry {
pub(super) fn new(
pub(super) fn new_media_entry(
name: impl Into<String>,
size: impl TryInto<u32>,
sha1: impl Into<Vec<u8>>,
) -> Self {
) -> MediaEntry {
MediaEntry {
name: name.into(),
size: size.try_into().unwrap_or_default(),
sha1: sha1.into(),
legacy_zip_filename: None,
}
}
}
impl SafeMediaEntry {
@ -146,7 +145,7 @@ pub(super) fn extract_media_entries(
let map: HashMap<&str, String> = serde_json::from_slice(&media_list_data)?;
map.into_iter().map(SafeMediaEntry::from_legacy).collect()
} else {
MediaEntries::decode_safe_entries(&media_list_data)
decode_safe_entries(&media_list_data)
}
}
@ -171,16 +170,14 @@ fn get_media_list_data(archive: &mut ZipArchive<File>, meta: &Meta) -> Result<Ve
Ok(buf)
}
impl MediaEntries {
fn decode_safe_entries(buf: &[u8]) -> Result<Vec<SafeMediaEntry>> {
let entries: Self = Message::decode(buf)?;
/// Decode a protobuf `MediaEntries` message and convert each entry into a
/// validated `SafeMediaEntry`, failing if any entry is rejected.
pub(super) fn decode_safe_entries(buf: &[u8]) -> Result<Vec<SafeMediaEntry>> {
    let decoded: MediaEntries = Message::decode(buf)?;
    let mut safe = Vec::with_capacity(decoded.entries.len());
    for (index, entry) in decoded.entries.into_iter().enumerate() {
        safe.push(SafeMediaEntry::from_entry((index, entry))?);
    }
    Ok(safe)
}
}
pub struct MediaIterEntry {
@ -353,10 +350,10 @@ mod test {
// new-style entries should have been normalized on export
let mut entries = Vec::new();
MediaEntries {
entries: vec![MediaEntry::new("con", 0, Vec::new())],
entries: vec![new_media_entry("con", 0, Vec::new())],
}
.encode(&mut entries)
.unwrap();
assert!(MediaEntries::decode_safe_entries(&entries).is_err());
assert!(decode_safe_entries(&entries).is_err());
}
}

View file

@ -5,18 +5,23 @@ use std::fs::File;
use std::io;
use std::io::Read;
pub(super) use anki_proto::import_export::package_metadata::Version;
pub(super) use anki_proto::import_export::PackageMetadata as Meta;
use prost::Message;
use zip::ZipArchive;
use zstd::stream::copy_decode;
use crate::import_export::ImportError;
pub(super) use crate::pb::import_export::package_metadata::Version;
pub(super) use crate::pb::import_export::PackageMetadata as Meta;
use crate::prelude::*;
use crate::storage::SchemaVersion;
impl Version {
pub(super) fn collection_filename(&self) -> &'static str {
/// Extension trait for the generated package `Version` enum; a trait is
/// needed because the orphan rule forbids inherent impls on types that
/// now live in the `anki_proto` crate.
pub(super) trait VersionExt {
    /// Name of the collection file stored inside a package of this version.
    fn collection_filename(&self) -> &'static str;
    /// Latest schema version that is supported by all clients supporting
    /// this package version.
    fn schema_version(&self) -> SchemaVersion;
}
impl VersionExt for Version {
fn collection_filename(&self) -> &'static str {
match self {
Version::Unknown => unreachable!(),
Version::Legacy1 => "collection.anki2",
@ -27,7 +32,7 @@ impl Version {
/// Latest schema version that is supported by all clients supporting
/// this package version.
pub(super) fn schema_version(&self) -> SchemaVersion {
fn schema_version(&self) -> SchemaVersion {
match self {
Version::Unknown => unreachable!(),
Version::Legacy1 | Version::Legacy2 => SchemaVersion::V11,
@ -36,14 +41,26 @@ impl Version {
}
}
impl Meta {
pub(super) fn new() -> Self {
/// Extension trait adding constructors and helpers to the generated
/// package-metadata type `Meta`; a trait is needed because the orphan
/// rule forbids inherent impls on `anki_proto` types.
pub(in crate::import_export) trait MetaExt: Sized {
    /// Metadata for a package in the latest format version.
    fn new() -> Self;
    /// Metadata for a package in the legacy (`Legacy2`) format version.
    fn new_legacy() -> Self;
    /// Extract metadata from an archive and check its version is supported.
    fn from_archive(archive: &mut ZipArchive<File>) -> Result<Self>;
    /// Name of the collection file inside the package.
    fn collection_filename(&self) -> &'static str;
    /// Latest schema version supported by all clients of this version.
    fn schema_version(&self) -> SchemaVersion;
    /// True for non-legacy packages, whose payload is zstd-compressed.
    fn zstd_compressed(&self) -> bool;
    /// True for legacy packages, whose media list is a JSON hashmap.
    fn media_list_is_hashmap(&self) -> bool;
    fn is_legacy(&self) -> bool;
    /// Copy `reader` into `writer`, zstd-decoding when compressed.
    fn copy(&self, reader: &mut impl Read, writer: &mut impl io::Write) -> io::Result<()>;
}
impl MetaExt for Meta {
fn new() -> Self {
    // Newly created packages always use the latest format version.
    let version = Version::Latest as i32;
    Self { version }
}
pub(super) fn new_legacy() -> Self {
fn new_legacy() -> Self {
Self {
version: Version::Legacy2 as i32,
}
@ -51,7 +68,7 @@ impl Meta {
/// Extracts meta data from an archive and checks if its version is
/// supported.
pub(super) fn from_archive(archive: &mut ZipArchive<File>) -> Result<Self> {
fn from_archive(archive: &mut ZipArchive<File>) -> Result<Self> {
let meta_bytes = archive.by_name("meta").ok().and_then(|mut meta_file| {
let mut buf = vec![];
meta_file.read_to_end(&mut buf).ok()?;
@ -77,21 +94,21 @@ impl Meta {
Ok(meta)
}
pub(super) fn collection_filename(&self) -> &'static str {
fn collection_filename(&self) -> &'static str {
self.version().collection_filename()
}
/// Latest schema version that is supported by all clients supporting
/// this package version.
pub(super) fn schema_version(&self) -> SchemaVersion {
fn schema_version(&self) -> SchemaVersion {
self.version().schema_version()
}
pub(super) fn zstd_compressed(&self) -> bool {
fn zstd_compressed(&self) -> bool {
!self.is_legacy()
}
pub(super) fn media_list_is_hashmap(&self) -> bool {
fn media_list_is_hashmap(&self) -> bool {
self.is_legacy()
}
@ -99,11 +116,7 @@ impl Meta {
matches!(self.version(), Version::Legacy1 | Version::Legacy2)
}
pub(super) fn copy(
&self,
reader: &mut impl Read,
writer: &mut impl io::Write,
) -> io::Result<()> {
fn copy(&self, reader: &mut impl Read, writer: &mut impl io::Write) -> io::Result<()> {
if self.zstd_compressed() {
copy_decode(reader, writer)
} else {

View file

@ -6,6 +6,8 @@ mod colpkg;
mod media;
mod meta;
pub(self) use anki_proto::import_export::media_entries::MediaEntry;
pub(self) use anki_proto::import_export::MediaEntries;
pub(crate) use apkg::NoteMeta;
pub(crate) use colpkg::export::export_colpkg_from_data;
pub use colpkg::import::import_colpkg;
@ -14,6 +16,3 @@ pub use media::MediaIterEntry;
pub use media::MediaIterError;
pub(self) use meta::Meta;
pub(self) use meta::Version;
pub(self) use crate::pb::import_export::media_entries::MediaEntry;
pub(self) use crate::pb::import_export::MediaEntries;

View file

@ -7,15 +7,16 @@ use std::fs::File;
use std::io::Write;
use std::sync::Arc;
use anki_proto::import_export::ExportNoteCsvRequest;
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use super::metadata::Delimiter;
use crate::import_export::text::csv::metadata::DelimeterExt;
use crate::import_export::ExportProgress;
use crate::import_export::IncrementableProgress;
use crate::notetype::RenderCardOutput;
use crate::pb::import_export::ExportNoteCsvRequest;
use crate::prelude::*;
use crate::search::SearchNode;
use crate::search::SortMode;
@ -60,7 +61,7 @@ impl Collection {
progress.call(ExportProgress::File)?;
let mut incrementor = progress.incrementor(ExportProgress::Notes);
let guard = self.search_notes_into_table(request.search_node())?;
let guard = self.search_notes_into_table(Into::<SearchNode>::into(&mut request))?;
let ctx = NoteContext::new(&request, guard.col)?;
let mut writer = note_file_writer_with_header(&request.out_path, &ctx)?;
guard.col.storage.for_each_note_in_search(|note| {
@ -283,8 +284,8 @@ impl NoteContext {
}
}
impl ExportNoteCsvRequest {
fn search_node(&mut self) -> SearchNode {
SearchNode::from(self.limit.take().unwrap_or_default())
impl From<&mut ExportNoteCsvRequest> for SearchNode {
    /// Take the request's limit (leaving the default in its place) and
    /// turn it into a search node.
    fn from(req: &mut ExportNoteCsvRequest) -> Self {
        let limit = req.limit.take().unwrap_or_default();
        SearchNode::from(limit)
    }
}

View file

@ -9,7 +9,9 @@ use std::io::SeekFrom;
use crate::import_export::text::csv::metadata::CsvDeck;
use crate::import_export::text::csv::metadata::CsvMetadata;
use crate::import_export::text::csv::metadata::CsvMetadataHelpers;
use crate::import_export::text::csv::metadata::CsvNotetype;
use crate::import_export::text::csv::metadata::DelimeterExt;
use crate::import_export::text::csv::metadata::Delimiter;
use crate::import_export::text::ForeignData;
use crate::import_export::text::ForeignNote;
@ -53,34 +55,12 @@ impl From<CsvMetadata> for ForeignData {
}
}
impl CsvMetadata {
fn deck(&self) -> Result<&CsvDeck> {
self.deck.as_ref().or_invalid("deck oneof not set")
}
fn notetype(&self) -> Result<&CsvNotetype> {
self.notetype.as_ref().or_invalid("notetype oneof not set")
}
fn field_source_columns(&self) -> Result<FieldSourceColumns> {
Ok(match self.notetype()? {
CsvNotetype::GlobalNotetype(global) => global
.field_columns
.iter()
.map(|&i| (i > 0).then_some(i as usize))
.collect(),
CsvNotetype::NotetypeColumn(_) => {
let meta_columns = self.meta_columns();
(1..self.column_labels.len() + 1)
.filter(|idx| !meta_columns.contains(idx))
.map(Some)
.collect()
}
})
}
trait CsvDeckExt {
fn name_or_id(&self) -> NameOrId;
fn column(&self) -> Option<usize>;
}
impl CsvDeck {
impl CsvDeckExt for CsvDeck {
fn name_or_id(&self) -> NameOrId {
match self {
Self::DeckId(did) => NameOrId::Id(*did),
@ -96,7 +76,12 @@ impl CsvDeck {
}
}
impl CsvNotetype {
trait CsvNotetypeExt {
fn name_or_id(&self) -> NameOrId;
fn column(&self) -> Option<usize>;
}
impl CsvNotetypeExt for CsvNotetype {
fn name_or_id(&self) -> NameOrId {
match self {
Self::GlobalNotetype(nt) => NameOrId::Id(nt.id),
@ -113,7 +98,7 @@ impl CsvNotetype {
}
/// Column indices for the fields of a notetype.
type FieldSourceColumns = Vec<Option<usize>>;
pub(super) type FieldSourceColumns = Vec<Option<usize>>;
// Column indices are 1-based.
struct ColumnContext {
@ -244,8 +229,10 @@ fn remove_tags_line_from_reader(reader: &mut (impl Read + Seek)) -> Result<()> {
mod test {
use std::io::Cursor;
use anki_proto::import_export::csv_metadata::MappedNotetype;
use super::super::metadata::test::CsvMetadataTestExt;
use super::*;
use crate::pb::import_export::csv_metadata::MappedNotetype;
macro_rules! import {
($metadata:expr, $csv:expr) => {{
@ -276,30 +263,6 @@ mod test {
};
}
impl CsvMetadata {
fn defaults_for_testing() -> Self {
Self {
delimiter: Delimiter::Comma as i32,
force_delimiter: false,
is_html: false,
force_is_html: false,
tags_column: 0,
guid_column: 0,
global_tags: Vec::new(),
updated_tags: Vec::new(),
column_labels: vec!["".to_string(); 2],
deck: Some(CsvDeck::DeckId(1)),
notetype: Some(CsvNotetype::GlobalNotetype(MappedNotetype {
id: 1,
field_columns: vec![1, 2],
})),
preview: Vec::new(),
dupe_resolution: 0,
match_scope: 0,
}
}
}
#[test]
fn should_allow_missing_columns() {
let metadata = CsvMetadata::defaults_for_testing();

View file

@ -9,23 +9,23 @@ use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
pub use anki_proto::import_export::csv_metadata::Deck as CsvDeck;
pub use anki_proto::import_export::csv_metadata::Delimiter;
pub use anki_proto::import_export::csv_metadata::DupeResolution;
pub use anki_proto::import_export::csv_metadata::MappedNotetype;
pub use anki_proto::import_export::csv_metadata::MatchScope;
pub use anki_proto::import_export::csv_metadata::Notetype as CsvNotetype;
pub use anki_proto::import_export::CsvMetadata;
use itertools::Itertools;
use strum::IntoEnumIterator;
use super::import::build_csv_reader;
use crate::config::I32ConfigKey;
use crate::import_export::text::csv::import::FieldSourceColumns;
use crate::import_export::text::NameOrId;
use crate::import_export::ImportError;
use crate::io::open_file;
use crate::notetype::NoteField;
use crate::pb::generic::StringList;
pub use crate::pb::import_export::csv_metadata::Deck as CsvDeck;
pub use crate::pb::import_export::csv_metadata::Delimiter;
pub use crate::pb::import_export::csv_metadata::DupeResolution;
pub use crate::pb::import_export::csv_metadata::MappedNotetype;
pub use crate::pb::import_export::csv_metadata::MatchScope;
pub use crate::pb::import_export::csv_metadata::Notetype as CsvNotetype;
pub use crate::pb::import_export::CsvMetadata;
use crate::prelude::*;
use crate::text::html_to_text_line;
use crate::text::is_html;
@ -135,7 +135,7 @@ impl Collection {
}
"notetype" => {
if let Ok(Some(nt)) = self.notetype_by_name_or_id(&NameOrId::parse(value)) {
metadata.notetype = Some(CsvNotetype::new_global(nt.id));
metadata.notetype = Some(new_global_csv_notetype(nt.id));
}
}
"deck" => {
@ -191,13 +191,13 @@ impl Collection {
/// we apply the defaults from defaults_for_adding().
pub(crate) fn maybe_set_notetype_and_deck(
&mut self,
metadata: &mut crate::pb::import_export::CsvMetadata,
metadata: &mut anki_proto::import_export::CsvMetadata,
notetype_id: Option<NotetypeId>,
deck_id: Option<DeckId>,
) -> Result<()> {
let defaults = self.defaults_for_adding(DeckId(0))?;
if metadata.notetype.is_none() || notetype_id.is_some() {
metadata.notetype = Some(CsvNotetype::new_global(
metadata.notetype = Some(new_global_csv_notetype(
notetype_id.unwrap_or(defaults.notetype_id),
));
}
@ -233,7 +233,15 @@ impl Collection {
}
}
impl CsvMetadata {
/// Helper accessors for the generated `CsvMetadata` message; a trait is
/// needed because the orphan rule forbids inherent impls on `anki_proto`
/// types.
pub(super) trait CsvMetadataHelpers {
    /// Defaults with config values filled in.
    fn from_config(col: &Collection) -> Self;
    /// The deck oneof, or an invalid-input error if it was not set.
    fn deck(&self) -> Result<&CsvDeck>;
    /// The notetype oneof, or an invalid-input error if it was not set.
    fn notetype(&self) -> Result<&CsvNotetype>;
    /// Source column for each field of the target notetype, if mapped.
    fn field_source_columns(&self) -> Result<FieldSourceColumns>;
    /// Indices of columns carrying metadata (deck, notetype, tags, guid).
    fn meta_columns(&self) -> HashSet<usize>;
}
impl CsvMetadataHelpers for CsvMetadata {
/// Defaults with config values filled in.
fn from_config(col: &Collection) -> Self {
Self {
@ -242,9 +250,56 @@ impl CsvMetadata {
..Default::default()
}
}
/// The deck oneof, or an invalid-input error if it was not set.
fn deck(&self) -> Result<&CsvDeck> {
    self.deck.as_ref().or_invalid("deck oneof not set")
}
/// The notetype oneof, or an invalid-input error if it was not set.
fn notetype(&self) -> Result<&CsvNotetype> {
    self.notetype.as_ref().or_invalid("notetype oneof not set")
}
/// For each field of the target notetype, the 1-based source column it is
/// populated from, or `None` if it has no source column.
fn field_source_columns(&self) -> Result<FieldSourceColumns> {
    Ok(match self.notetype()? {
        // Explicit mapping: a stored column of 0 means "no source column".
        CsvNotetype::GlobalNotetype(global) => global
            .field_columns
            .iter()
            .map(|&i| (i > 0).then_some(i as usize))
            .collect(),
        // Notetype determined per row from a column: all non-meta columns
        // feed fields, in order.
        CsvNotetype::NotetypeColumn(_) => {
            let meta_columns = self.meta_columns();
            (1..self.column_labels.len() + 1)
                .filter(|idx| !meta_columns.contains(idx))
                .map(Some)
                .collect()
        }
    })
}
/// Indices of the columns carrying metadata rather than field content:
/// the deck column, the notetype column, and the tags/guid columns
/// (where 0 means "not used").
fn meta_columns(&self) -> HashSet<usize> {
    let deck_col = match self.deck {
        Some(CsvDeck::DeckColumn(col)) => Some(col as usize),
        _ => None,
    };
    let notetype_col = match self.notetype {
        Some(CsvNotetype::NotetypeColumn(col)) => Some(col as usize),
        _ => None,
    };
    let tags_col = (self.tags_column > 0).then_some(self.tags_column as usize);
    let guid_col = (self.guid_column > 0).then_some(self.guid_column as usize);
    [deck_col, notetype_col, tags_col, guid_col]
        .into_iter()
        .flatten()
        .collect()
}
}
impl DupeResolution {
/// Construction helpers for the generated `DupeResolution` enum (a trait
/// because the orphan rule forbids inherent impls on `anki_proto` types).
pub(super) trait DupeResolutionExt: Sized {
    /// Value stored in the collection config, or the default if invalid.
    fn from_config(col: &Collection) -> Self;
    /// Parse from a textual option value, if recognized.
    fn from_text(text: &str) -> Option<Self>;
}
impl DupeResolutionExt for DupeResolution {
fn from_config(col: &Collection) -> Self {
Self::from_i32(col.get_config_i32(I32ConfigKey::CsvDuplicateResolution)).unwrap_or_default()
}
@ -259,7 +314,12 @@ impl DupeResolution {
}
}
impl MatchScope {
/// Construction helpers for the generated `MatchScope` enum (a trait
/// because the orphan rule forbids inherent impls on `anki_proto` types).
pub(super) trait MatchScopeExt: Sized {
    /// Value stored in the collection config, or the default if invalid.
    fn from_config(col: &Collection) -> Self;
    /// Parse from a textual option value, if recognized.
    fn from_text(text: &str) -> Option<Self>;
}
impl MatchScopeExt for MatchScope {
fn from_config(col: &Collection) -> Self {
Self::from_i32(col.get_config_i32(I32ConfigKey::MatchScope)).unwrap_or_default()
}
@ -308,8 +368,12 @@ fn set_preview(metadata: &mut CsvMetadata, records: &[csv::StringRecord]) -> Res
Ok(())
}
fn build_preview_row(min_len: usize, record: &csv::StringRecord, strip_html: bool) -> StringList {
StringList {
fn build_preview_row(
min_len: usize,
record: &csv::StringRecord,
strip_html: bool,
) -> anki_proto::generic::StringList {
anki_proto::generic::StringList {
vals: record
.iter()
.pad_using(min_len, |_| "")
@ -475,8 +539,13 @@ fn strip_line_ending(line: &str) -> &str {
.unwrap_or_else(|| line.strip_suffix('\n').unwrap_or(line))
}
impl Delimiter {
pub fn byte(self) -> u8 {
/// Byte value and display name for the generated `Delimiter` enum.
/// NOTE(review): the trait name is misspelled ("Delimeter"); renaming
/// it to `DelimiterExt` would be clearer, but requires updating the
/// impl and all use sites, so is left for a follow-up.
pub(super) trait DelimeterExt {
/// The raw byte used when reading/writing CSV with this delimiter.
fn byte(self) -> u8;
/// Lowercase English name, e.g. "comma".
fn name(self) -> &'static str;
}
impl DelimeterExt for Delimiter {
fn byte(self) -> u8 {
match self {
Delimiter::Comma => b',',
Delimiter::Semicolon => b';',
@ -487,7 +556,7 @@ impl Delimiter {
}
}
pub fn name(self) -> &'static str {
fn name(self) -> &'static str {
match self {
Delimiter::Comma => "comma",
Delimiter::Semicolon => "semicolon",
@ -499,32 +568,11 @@ impl Delimiter {
}
}
impl CsvNotetype {
fn new_global(id: NotetypeId) -> Self {
Self::GlobalNotetype(MappedNotetype {
/// Wrap the given notetype id in a `CsvNotetype::GlobalNotetype`, with
/// an initially empty field-column mapping (filled in later when the
/// columns are mapped).
fn new_global_csv_notetype(id: NotetypeId) -> CsvNotetype {
    let mapped = MappedNotetype {
        id: id.0,
        field_columns: Vec::new(),
    };
    CsvNotetype::GlobalNotetype(mapped)
}
}
impl CsvMetadata {
pub(super) fn meta_columns(&self) -> HashSet<usize> {
let mut columns = HashSet::new();
if let Some(CsvDeck::DeckColumn(deck_column)) = self.deck {
columns.insert(deck_column as usize);
}
if let Some(CsvNotetype::NotetypeColumn(notetype_column)) = self.notetype {
columns.insert(notetype_column as usize);
}
if self.tags_column > 0 {
columns.insert(self.tags_column as usize);
}
if self.guid_column > 0 {
columns.insert(self.guid_column as usize);
}
columns
}
}
impl NameOrId {
@ -537,16 +585,8 @@ impl NameOrId {
}
}
impl From<csv::StringRecord> for StringList {
fn from(record: csv::StringRecord) -> Self {
Self {
vals: record.iter().map(ToString::to_string).collect(),
}
}
}
#[cfg(test)]
mod test {
pub(in crate::import_export) mod test {
use std::io::Cursor;
use super::*;
@ -561,7 +601,36 @@ mod test {
};
}
impl CsvMetadata {
/// Test-only helpers for constructing and inspecting `CsvMetadata`.
pub trait CsvMetadataTestExt {
/// A fully-populated metadata value with sensible test defaults.
fn defaults_for_testing() -> Self;
/// The deck id; panics if the deck oneof holds something else.
fn unwrap_deck_id(&self) -> i64;
/// The global notetype's id; panics if not a global notetype.
fn unwrap_notetype_id(&self) -> i64;
/// The global notetype's field-column mapping; panics otherwise.
fn unwrap_notetype_map(&self) -> &[u32];
}
impl CsvMetadataTestExt for CsvMetadata {
/// A fully-populated `CsvMetadata` for unit tests: two unnamed
/// columns, comma-delimited, mapped to global notetype 1 (fields from
/// columns 1 and 2) and deck 1.
fn defaults_for_testing() -> Self {
    let global_notetype = MappedNotetype {
        id: 1,
        field_columns: vec![1, 2],
    };
    Self {
        // Parsing options.
        delimiter: Delimiter::Comma as i32,
        force_delimiter: false,
        is_html: false,
        force_is_html: false,
        column_labels: vec![String::new(); 2],
        // Special columns (0 = unused).
        tags_column: 0,
        guid_column: 0,
        // Import targets.
        deck: Some(CsvDeck::DeckId(1)),
        notetype: Some(CsvNotetype::GlobalNotetype(global_notetype)),
        // Tag handling.
        global_tags: Vec::new(),
        updated_tags: Vec::new(),
        // Misc.
        preview: Vec::new(),
        dupe_resolution: 0,
        match_scope: 0,
    }
}
fn unwrap_deck_id(&self) -> i64 {
match self.deck {
Some(CsvDeck::DeckId(did)) => did,
@ -575,6 +644,12 @@ mod test {
_ => panic!("no notetype id"),
}
}
/// The field-column mapping of the global notetype; panics if the
/// metadata does not reference a global notetype.
fn unwrap_notetype_map(&self) -> &[u32] {
    if let Some(CsvNotetype::GlobalNotetype(nt)) = &self.notetype {
        &nt.field_columns
    } else {
        panic!("no notetype map")
    }
}
}
#[test]
@ -729,15 +804,6 @@ mod test {
);
}
impl CsvMetadata {
fn unwrap_notetype_map(&self) -> &[u32] {
match &self.notetype {
Some(CsvNotetype::GlobalNotetype(nt)) => &nt.field_columns,
_ => panic!("no notetype map"),
}
}
}
#[test]
fn should_map_default_notetype_fields_by_index_if_no_column_names() {
let mut col = Collection::new();

View file

@ -3,4 +3,4 @@
mod export;
mod import;
mod metadata;
pub mod metadata;

View file

@ -25,7 +25,6 @@ use crate::notes::normalize_field;
use crate::notetype::CardGenContext;
use crate::notetype::CardTemplate;
use crate::notetype::NoteField;
use crate::notetype::NotetypeConfig;
use crate::prelude::*;
use crate::text::strip_html_preserving_media_filenames;
@ -60,14 +59,12 @@ impl ForeignData {
}
}
impl NoteLog {
fn new(dupe_resolution: DupeResolution, found_notes: u32) -> Self {
Self {
fn new_note_log(dupe_resolution: DupeResolution, found_notes: u32) -> NoteLog {
NoteLog {
dupe_resolution: dupe_resolution as i32,
found_notes,
..Default::default()
}
}
}
struct Context<'a> {
@ -235,7 +232,7 @@ impl<'a> Context<'a> {
progress: &mut IncrementableProgress<ImportProgress>,
) -> Result<NoteLog> {
let mut incrementor = progress.incrementor(ImportProgress::Notes);
let mut log = NoteLog::new(self.dupe_resolution, notes.len() as u32);
let mut log = new_note_log(self.dupe_resolution, notes.len() as u32);
for foreign in notes {
incrementor.increment()?;
if foreign.first_field_is_the_empty_string() {
@ -612,9 +609,9 @@ impl ForeignNotetype {
.map(ForeignTemplate::into_native)
.collect(),
config: if self.is_cloze {
NotetypeConfig::new_cloze()
Notetype::new_cloze_config()
} else {
NotetypeConfig::new()
Notetype::new_config()
},
..Notetype::default()
}

View file

@ -5,12 +5,12 @@ pub mod csv;
mod import;
mod json;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use anki_proto::import_export::csv_metadata::DupeResolution;
use anki_proto::import_export::csv_metadata::MatchScope;
use serde::Deserialize;
use serde::Serialize;
use super::LogNote;
use crate::pb::import_export::csv_metadata::DupeResolution;
use crate::pb::import_export::csv_metadata::MatchScope;
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(default)]

View file

@ -16,7 +16,6 @@ pub mod deckconfig;
pub mod decks;
pub mod error;
pub mod findreplace;
pub mod i18n;
pub mod image_occlusion;
pub mod import_export;
mod io;
@ -28,7 +27,6 @@ pub mod media;
pub mod notes;
pub mod notetype;
pub mod ops;
pub mod pb;
mod preferences;
pub mod prelude;
pub mod revlog;
@ -56,3 +54,5 @@ use lazy_static::lazy_static;
lazy_static! {
pub(crate) static ref PYTHON_UNIT_TESTS: bool = env::var("ANKI_TEST_MODE").is_ok();
}
// temporary during proto migration

View file

@ -1,17 +1,16 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub use crate::pb::links::help_page_link_request::HelpPage;
pub use anki_proto::links::help_page_link_request::HelpPage;
static HELP_SITE: &str = "https://docs.ankiweb.net/";
impl HelpPage {
pub fn to_link(self) -> String {
format!("{}{}", HELP_SITE, self.to_link_suffix())
}
/// Absolute URL of the given help page in the Anki manual.
pub fn help_page_to_link(page: HelpPage) -> String {
    let suffix = help_page_link_suffix(page);
    format!("{HELP_SITE}{suffix}")
}
pub fn to_link_suffix(self) -> &'static str {
match self {
pub fn help_page_link_suffix(page: HelpPage) -> &'static str {
match page {
HelpPage::NoteType => "getting-started.html#note-types",
HelpPage::Browsing => "browsing.html",
HelpPage::BrowsingFindAndReplace => "browsing.html#find-and-replace",
@ -35,12 +34,9 @@ impl HelpPage {
HelpPage::CardTypeNoFrontField => {
"templates/errors.html#no-field-replacement-on-front-side"
}
HelpPage::CardTypeMissingCloze => {
"templates/errors.html#no-cloze-filter-on-cloze-notetype"
}
HelpPage::CardTypeMissingCloze => "templates/errors.html#no-cloze-filter-on-cloze-notetype",
HelpPage::CardTypeExtraneousCloze => {
"templates/errors.html#cloze-filter-outside-cloze-notetype"
}
}
}
}

View file

@ -7,6 +7,7 @@ use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use anki_proto::notes::note_fields_check_response::State as NoteFieldsState;
use itertools::Itertools;
use num_integer::Integer;
use sha1::Digest;
@ -17,8 +18,6 @@ use crate::define_newtype;
use crate::notetype::CardGenContext;
use crate::notetype::NoteField;
use crate::ops::StateChanges;
use crate::pb;
use crate::pb::notes::note_fields_check_response::State as NoteFieldsState;
use crate::prelude::*;
use crate::template::field_is_empty;
use crate::text::ensure_string_in_nfc;
@ -259,9 +258,9 @@ pub(crate) fn normalize_field(field: &mut String, normalize_text: bool) {
}
}
impl From<Note> for pb::notes::Note {
impl From<Note> for anki_proto::notes::Note {
fn from(n: Note) -> Self {
pb::notes::Note {
anki_proto::notes::Note {
id: n.id.0,
guid: n.guid,
notetype_id: n.notetype_id.0,
@ -273,8 +272,8 @@ impl From<Note> for pb::notes::Note {
}
}
impl From<pb::notes::Note> for Note {
fn from(n: pb::notes::Note) -> Self {
impl From<anki_proto::notes::Note> for Note {
fn from(n: anki_proto::notes::Note) -> Self {
Note {
id: NoteId(n.id),
guid: n.guid,

View file

@ -3,7 +3,6 @@
use super::NoteFieldConfig;
use super::NoteFieldProto;
use crate::pb::generic::UInt32;
use crate::prelude::*;
#[derive(Debug, PartialEq, Clone)]
@ -16,7 +15,7 @@ pub struct NoteField {
impl From<NoteField> for NoteFieldProto {
fn from(f: NoteField) -> Self {
NoteFieldProto {
ord: f.ord.map(|n| UInt32 { val: n }),
ord: f.ord.map(Into::into),
name: f.name,
config: Some(f.config),
}

View file

@ -19,6 +19,15 @@ use std::collections::HashSet;
use std::iter::FromIterator;
use std::sync::Arc;
pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind;
pub use anki_proto::notetypes::notetype::config::CardRequirement;
pub use anki_proto::notetypes::notetype::config::Kind as NotetypeKind;
pub use anki_proto::notetypes::notetype::field::Config as NoteFieldConfig;
pub use anki_proto::notetypes::notetype::template::Config as CardTemplateConfig;
pub use anki_proto::notetypes::notetype::Config as NotetypeConfig;
pub use anki_proto::notetypes::notetype::Field as NoteFieldProto;
pub use anki_proto::notetypes::notetype::Template as CardTemplateProto;
pub use anki_proto::notetypes::Notetype as NotetypeProto;
pub(crate) use cardgen::AlreadyGeneratedCardInfo;
pub(crate) use cardgen::CardGenContext;
pub use fields::NoteField;
@ -39,15 +48,6 @@ use crate::error::CardTypeError;
use crate::error::CardTypeErrorDetails;
use crate::error::CardTypeSnafu;
use crate::error::MissingClozeSnafu;
pub use crate::pb::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind;
pub use crate::pb::notetypes::notetype::config::CardRequirement;
pub use crate::pb::notetypes::notetype::config::Kind as NotetypeKind;
pub use crate::pb::notetypes::notetype::field::Config as NoteFieldConfig;
pub use crate::pb::notetypes::notetype::template::Config as CardTemplateConfig;
pub use crate::pb::notetypes::notetype::Config as NotetypeConfig;
pub use crate::pb::notetypes::notetype::Field as NoteFieldProto;
pub use crate::pb::notetypes::notetype::Template as CardTemplateProto;
pub use crate::pb::notetypes::Notetype as NotetypeProto;
use crate::prelude::*;
use crate::search::JoinSearches;
use crate::search::Node;
@ -96,13 +96,13 @@ impl Default for Notetype {
usn: Usn(0),
fields: vec![],
templates: vec![],
config: NotetypeConfig::new(),
config: Notetype::new_config(),
}
}
}
impl NotetypeConfig {
pub(crate) fn new() -> Self {
impl Notetype {
pub(crate) fn new_config() -> NotetypeConfig {
NotetypeConfig {
css: DEFAULT_CSS.into(),
latex_pre: DEFAULT_LATEX_HEADER.into(),
@ -111,8 +111,8 @@ impl NotetypeConfig {
}
}
pub(crate) fn new_cloze() -> Self {
let mut config = Self::new();
pub(crate) fn new_cloze_config() -> NotetypeConfig {
let mut config = Self::new_config();
config.css += DEFAULT_CLOZE_CSS;
config.kind = NotetypeKind::Cloze as i32;
config

View file

@ -1,10 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::notetypes::stock_notetype::Kind;
use anki_proto::notetypes::stock_notetype::OriginalStockKind;
use crate::notetype::stock::get_original_stock_notetype;
use crate::notetype::stock::StockKind;
use crate::pb::notetypes::stock_notetype::Kind;
use crate::pb::notetypes::stock_notetype::OriginalStockKind;
use crate::prelude::*;
impl Collection {

View file

@ -3,8 +3,8 @@
use std::collections::HashMap;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use serde_repr::Deserialize_repr;
use serde_repr::Serialize_repr;

View file

@ -1,18 +1,19 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_i18n::I18n;
use anki_proto::notetypes::notetype::config::Kind as NotetypeKind;
use anki_proto::notetypes::stock_notetype::Kind;
pub(crate) use anki_proto::notetypes::stock_notetype::Kind as StockKind;
use anki_proto::notetypes::stock_notetype::OriginalStockKind;
use super::NotetypeConfig;
use crate::config::ConfigEntry;
use crate::config::ConfigKey;
use crate::error::Result;
use crate::i18n::I18n;
use crate::image_occlusion::notetype::image_occlusion_notetype;
use crate::invalid_input;
use crate::notetype::Notetype;
use crate::pb::notetypes::notetype::config::Kind as NotetypeKind;
use crate::pb::notetypes::stock_notetype::Kind;
pub(crate) use crate::pb::notetypes::stock_notetype::Kind as StockKind;
use crate::pb::notetypes::stock_notetype::OriginalStockKind;
use crate::storage::SqliteStorage;
use crate::timestamp::TimestampSecs;
@ -63,9 +64,9 @@ pub(crate) fn empty_stock(
kind: nt_kind as i32,
original_stock_kind: original_stock_kind as i32,
..if nt_kind == NotetypeKind::Cloze {
NotetypeConfig::new_cloze()
Notetype::new_cloze_config()
} else {
NotetypeConfig::new()
Notetype::new_config()
}
},
..Default::default()

View file

@ -3,7 +3,6 @@
use super::CardTemplateConfig;
use super::CardTemplateProto;
use crate::pb::generic::UInt32;
use crate::prelude::*;
use crate::template::ParsedTemplate;
@ -53,7 +52,7 @@ impl CardTemplate {
impl From<CardTemplate> for CardTemplateProto {
fn from(t: CardTemplate) -> Self {
CardTemplateProto {
ord: t.ord.map(|n| UInt32 { val: n }),
ord: t.ord.map(Into::into),
mtime_secs: t.mtime_secs.0,
usn: t.usn.0,
name: t.name,

View file

@ -1,34 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
macro_rules! protobuf {
($ident:ident, $name:literal) => {
pub mod $ident {
include!(concat!("../../out/rslib/proto/anki.", $name, ".rs"));
}
};
}
include!("../../out/rslib/proto/service_index.rs");
protobuf!(ankidroid, "ankidroid");
protobuf!(backend, "backend");
protobuf!(card_rendering, "card_rendering");
protobuf!(cards, "cards");
protobuf!(collection, "collection");
protobuf!(config, "config");
protobuf!(deckconfig, "deckconfig");
protobuf!(decks, "decks");
protobuf!(generic, "generic");
protobuf!(i18n, "i18n");
protobuf!(image_occlusion, "image_occlusion");
protobuf!(import_export, "import_export");
protobuf!(links, "links");
protobuf!(media, "media");
protobuf!(notes, "notes");
protobuf!(notetypes, "notetypes");
protobuf!(scheduler, "scheduler");
protobuf!(search, "search");
protobuf!(stats, "stats");
protobuf!(sync, "sync");
protobuf!(tags, "tags");

View file

@ -1,15 +1,16 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::config::preferences::scheduling::NewReviewMix as NewRevMixPB;
use anki_proto::config::preferences::Editing;
use anki_proto::config::preferences::Reviewing;
use anki_proto::config::preferences::Scheduling;
use anki_proto::config::Preferences;
use crate::collection::Collection;
use crate::config::BoolKey;
use crate::config::StringKey;
use crate::error::Result;
use crate::pb::config::preferences::scheduling::NewReviewMix as NewRevMixPB;
use crate::pb::config::preferences::Editing;
use crate::pb::config::preferences::Reviewing;
use crate::pb::config::preferences::Scheduling;
use crate::pb::config::Preferences;
use crate::prelude::*;
use crate::scheduler::timing::local_minutes_west_for_stamp;

View file

@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub use anki_i18n::I18n;
pub use snafu::ResultExt;
pub use crate::card::Card;
@ -17,7 +18,6 @@ pub use crate::error::AnkiError;
pub use crate::error::OrInvalid;
pub use crate::error::OrNotFound;
pub use crate::error::Result;
pub use crate::i18n::I18n;
pub use crate::invalid_input;
pub use crate::media::Sha1Hash;
pub use crate::notes::Note;

Some files were not shown because too many files have changed in this diff Show more