Refactor service generation (#2552)

* Automatically elide empty inputs and outputs to backend methods

* Refactor service generation

Despite the fact that the majority of our Protobuf service methods require
an open collection, they were not accessible with just a Collection
object. To access the methods (e.g. because we haven't gotten around to
exposing the correct API in Collection yet), you had to wrap the collection
in a Backend object, and pay a mutex-acquisition cost for each call, even
if you have exclusive access to the object.

This commit migrates the majority of service methods to the Collection, so
they can now be used directly, and improves the ergonomics a bit at the
same time.

The approach taken:

- The service generation now happens in rslib instead of anki_proto, which
avoids the need for trait constraints and associated types.
- Service methods are assumed to be collection-based by default. Instead of
implementing the service on Backend, we now implement it on Collection, which
means our methods no longer need to use self.with_col(...).
- We automatically generate methods in Backend which use self.with_col() to
delegate to the Collection method.
- For methods that are only appropriate for the backend, we add a flag in
the .proto file. The codegen uses this flag to write the method into a
BackendFooService instead of FooService, which the backend implements.
- The flag also allows us to define separate implementations for collection
and backend, so we can e.g. skip the collection mutex in the i18n service
while also providing the service on a collection.
This commit is contained in:
Damien Elmes 2023-06-19 15:33:40 +10:00 committed by GitHub
parent 1aabff9248
commit 553303fc12
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
85 changed files with 2444 additions and 2335 deletions

55
Cargo.lock generated
View file

@ -123,9 +123,10 @@ dependencies = [
"num_enum",
"once_cell",
"percent-encoding-iri",
"phf 0.11.1",
"pin-project",
"prettyplease 0.2.7",
"prost",
"prost-build",
"prost-reflect",
"prost-types",
"pulldown-cmark 0.9.2",
@ -142,6 +143,7 @@ dependencies = [
"sha1",
"snafu",
"strum",
"syn 2.0.18",
"tempfile",
"tokio",
"tokio-util",
@ -153,7 +155,6 @@ dependencies = [
"unicase",
"unicode-normalization",
"utime",
"which",
"windows",
"wiremock",
"workspace-hack",
@ -378,7 +379,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -469,7 +470,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -762,7 +763,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -1035,7 +1036,7 @@ dependencies = [
"proc-macro2",
"quote",
"scratch",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -1052,7 +1053,7 @@ checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -1455,7 +1456,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -2541,7 +2542,7 @@ dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -2767,7 +2768,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -2973,6 +2974,16 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "prettyplease"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43ded2b5b204571f065ab8540367d738dfe1b3606ab9eb669dcfb5e7a3a07501"
dependencies = [
"proc-macro2",
"syn 2.0.18",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
@ -2991,9 +3002,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
version = "1.0.54"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e472a104799c74b514a57226160104aa483546de37e839ec50e3c2e41dd87534"
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
dependencies = [
"unicode-ident",
]
@ -3021,7 +3032,7 @@ dependencies = [
"log",
"multimap",
"petgraph",
"prettyplease",
"prettyplease 0.1.25",
"prost",
"prost-types",
"regex",
@ -3158,9 +3169,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.26"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
@ -3615,7 +3626,7 @@ checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -3657,7 +3668,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -3900,9 +3911,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.12"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79d9531f94112cfc3e4c8f5f02cb2b58f72c97b7efd85f70203cc6d8efda5927"
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
@ -3999,7 +4010,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -4100,7 +4111,7 @@ checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.12",
"syn 2.0.18",
]
[[package]]
@ -5072,7 +5083,7 @@ dependencies = [
"snafu",
"snafu-derive",
"syn 1.0.109",
"syn 2.0.12",
"syn 2.0.18",
"time",
"tokio",
"tokio-util",

View file

@ -1961,6 +1961,15 @@
"license_file": null,
"description": "A minimal `syn` syntax tree pretty-printer"
},
{
"name": "prettyplease",
"version": "0.2.7",
"authors": "David Tolnay <dtolnay@gmail.com>",
"repository": "https://github.com/dtolnay/prettyplease",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "A minimal `syn` syntax tree pretty-printer"
},
{
"name": "proc-macro-crate",
"version": "1.3.1",
@ -1981,7 +1990,7 @@
},
{
"name": "proc-macro2",
"version": "1.0.54",
"version": "1.0.60",
"authors": "David Tolnay <dtolnay@gmail.com>|Alex Crichton <alex@alexcrichton.com>",
"repository": "https://github.com/dtolnay/proc-macro2",
"license": "Apache-2.0 OR MIT",
@ -2044,7 +2053,7 @@
},
{
"name": "quote",
"version": "1.0.26",
"version": "1.0.28",
"authors": "David Tolnay <dtolnay@gmail.com>",
"repository": "https://github.com/dtolnay/quote",
"license": "Apache-2.0 OR MIT",
@ -2602,7 +2611,7 @@
},
{
"name": "syn",
"version": "2.0.12",
"version": "2.0.18",
"authors": "David Tolnay <dtolnay@gmail.com>",
"repository": "https://github.com/dtolnay/syn",
"license": "Apache-2.0 OR MIT",

View file

@ -4,25 +4,34 @@ option java_multiple_files = true;
import "anki/generic.proto";
import "anki/scheduler.proto";
import "anki/codegen.proto";
package anki.ankidroid;
service AnkidroidService {
rpc SchedTimingTodayLegacy(SchedTimingTodayLegacyRequest)
returns (scheduler.SchedTimingTodayResponse);
rpc LocalMinutesWestLegacy(generic.Int64) returns (generic.Int32);
returns (scheduler.SchedTimingTodayResponse) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc LocalMinutesWestLegacy(generic.Int64) returns (generic.Int32) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc RunDbCommand(generic.Json) returns (generic.Json);
rpc RunDbCommandProto(generic.Json) returns (DBResponse);
rpc RunDbCommandProto(generic.Json) returns (DbResponse);
rpc InsertForId(generic.Json) returns (generic.Int64);
rpc RunDbCommandForRowCount(generic.Json) returns (generic.Int64);
rpc FlushAllQueries(generic.Empty) returns (generic.Empty);
rpc FlushQuery(generic.Int32) returns (generic.Empty);
rpc GetNextResultPage(GetNextResultPageRequest) returns (DBResponse);
rpc SetPageSize(generic.Int64) returns (generic.Empty);
rpc GetNextResultPage(GetNextResultPageRequest) returns (DbResponse);
rpc SetPageSize(generic.Int64) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc GetColumnNamesFromQuery(generic.String) returns (generic.StringList);
rpc GetActiveSequenceNumbers(generic.Empty)
returns (GetActiveSequenceNumbersResponse);
rpc DebugProduceError(generic.String) returns (generic.Empty);
rpc DebugProduceError(generic.String) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
}
message DebugActiveDatabaseSequenceNumbersResponse {
@ -57,7 +66,7 @@ message DbResult {
repeated Row rows = 1;
}
message DBResponse {
message DbResponse {
DbResult result = 1;
int32 sequenceNumber = 2;
int32 rowCount = 3;

View file

@ -10,6 +10,7 @@ package anki.card_rendering;
import "anki/generic.proto";
import "anki/notes.proto";
import "anki/notetypes.proto";
import "anki/codegen.proto";
service CardRenderingService {
rpc ExtractAvTags(ExtractAvTagsRequest) returns (ExtractAvTagsResponse);
@ -25,7 +26,10 @@ service CardRenderingService {
rpc RenderMarkdown(RenderMarkdownRequest) returns (generic.String);
rpc EncodeIriPaths(generic.String) returns (generic.String);
rpc DecodeIriPaths(generic.String) returns (generic.String);
rpc StripHtml(StripHtmlRequest) returns (generic.String);
rpc StripHtml(StripHtmlRequest) returns (generic.String) {
// a bunch of our unit tests access this without a collection
option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND;
}
rpc CompareAnswer(CompareAnswerRequest) returns (generic.String);
rpc ExtractClozeForTyping(ExtractClozeForTypingRequest)
returns (generic.String);

34
proto/anki/codegen.proto Normal file
View file

@ -0,0 +1,34 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

syntax = "proto3";

package anki.codegen;

import "google/protobuf/descriptor.proto";

// Custom method option consulted by rslib's build-time service codegen to
// decide where each RPC is implemented (Collection, Backend, or both).
extend google.protobuf.MethodOptions {
  RustMethods rust_methods = 50000;
}

// Mirrors the extension above with the same field number, so the codegen can
// recover a method's options by transcoding into this message via
// prost-reflect (see rust_interface.rs).
message MethodOptions {
  RustMethods rust_methods = 50000;
}

enum RustMethods {
  // Used for typical collection-based operations. We must implement the
  // method on Collection. The same method is automatically implemented on
  // Backend, which forwards to Collection.
  RUST_METHODS_COLLECTION_AND_AUTO_BACKEND = 0;
  // Method only makes sense on the backend (eg one that closes and reopens
  // the collection). Backend method needs to be implemented.
  RUST_METHODS_BACKEND_ONLY = 1;
  // Both the backend and collection need to implement the method; there
  // is no auto-delegation. Can be used to provide a method on both, but
  // skip the Collection mutex lock when a backend handle is available.
  // In practice we only do this for the i18n methods; for the occasional
  // method in other services that doesn't happen to need the collection,
  // we just delegate to the collection method for convenience, and to make
  // sure it's available even if the consumer is not using Backend.
  RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND = 2;
}

View file

@ -8,10 +8,15 @@ option java_multiple_files = true;
package anki.collection;
import "anki/generic.proto";
import "anki/codegen.proto";
service CollectionService {
rpc OpenCollection(OpenCollectionRequest) returns (generic.Empty);
rpc CloseCollection(CloseCollectionRequest) returns (generic.Empty);
rpc OpenCollection(OpenCollectionRequest) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc CloseCollection(CloseCollectionRequest) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc CheckDatabase(generic.Empty) returns (CheckDatabaseResponse);
rpc GetUndoStatus(generic.Empty) returns (UndoStatus);
rpc Undo(generic.Empty) returns (OpChangesAfterUndo);
@ -24,10 +29,14 @@ service CollectionService {
// transaction. Unlike a collection export, does not require reopening the DB,
// as there is no downgrade step.
// Returns false if it's not time to make a backup yet.
rpc CreateBackup(CreateBackupRequest) returns (generic.Bool);
rpc CreateBackup(CreateBackupRequest) returns (generic.Bool) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
// If a backup is running, wait for it to complete. Will return an error
// if the backup encountered an error.
rpc AwaitBackupCompletion(generic.Empty) returns (generic.Empty);
rpc AwaitBackupCompletion(generic.Empty) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
}
message OpenCollectionRequest {

View file

@ -8,11 +8,18 @@ option java_multiple_files = true;
package anki.i18n;
import "anki/generic.proto";
import "anki/codegen.proto";
service I18nService {
rpc TranslateString(TranslateStringRequest) returns (generic.String);
rpc FormatTimespan(FormatTimespanRequest) returns (generic.String);
rpc I18nResources(I18nResourcesRequest) returns (generic.Json);
rpc TranslateString(TranslateStringRequest) returns (generic.String) {
option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND;
}
rpc FormatTimespan(FormatTimespanRequest) returns (generic.String) {
option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND;
}
rpc I18nResources(I18nResourcesRequest) returns (generic.Json) {
option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND;
}
}
message TranslateStringRequest {

View file

@ -11,12 +11,17 @@ import "anki/cards.proto";
import "anki/collection.proto";
import "anki/notes.proto";
import "anki/generic.proto";
import "anki/codegen.proto";
service ImportExportService {
rpc ImportCollectionPackage(ImportCollectionPackageRequest)
returns (generic.Empty);
returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc ExportCollectionPackage(ExportCollectionPackageRequest)
returns (generic.Empty);
returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc ImportAnkiPackage(ImportAnkiPackageRequest) returns (ImportResponse);
rpc ExportAnkiPackage(ExportAnkiPackageRequest) returns (generic.UInt32);
rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata);

View file

@ -11,8 +11,8 @@ import "anki/generic.proto";
service MediaService {
rpc CheckMedia(generic.Empty) returns (CheckMediaResponse);
rpc TrashMediaFiles(TrashMediaFilesRequest) returns (generic.Empty);
rpc AddMediaFile(AddMediaFileRequest) returns (generic.String);
rpc TrashMediaFiles(TrashMediaFilesRequest) returns (generic.Empty);
rpc EmptyTrash(generic.Empty) returns (generic.Empty);
rpc RestoreTrash(generic.Empty) returns (generic.Empty);
}

View file

@ -8,17 +8,33 @@ option java_multiple_files = true;
package anki.sync;
import "anki/generic.proto";
import "anki/codegen.proto";
service SyncService {
rpc SyncMedia(SyncAuth) returns (generic.Empty);
rpc AbortMediaSync(generic.Empty) returns (generic.Empty);
rpc SyncLogin(SyncLoginRequest) returns (SyncAuth);
rpc SyncStatus(SyncAuth) returns (SyncStatusResponse);
rpc SyncCollection(SyncAuth) returns (SyncCollectionResponse);
rpc FullUpload(SyncAuth) returns (generic.Empty);
rpc FullDownload(SyncAuth) returns (generic.Empty);
rpc AbortSync(generic.Empty) returns (generic.Empty);
rpc SyncMedia(SyncAuth) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc AbortMediaSync(generic.Empty) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc SyncLogin(SyncLoginRequest) returns (SyncAuth) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc SyncStatus(SyncAuth) returns (SyncStatusResponse) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc SyncCollection(SyncAuth) returns (SyncCollectionResponse) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc FullUpload(SyncAuth) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc FullDownload(SyncAuth) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
rpc AbortSync(generic.Empty) returns (generic.Empty) {
option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
}
}
message SyncAuth {

View file

@ -53,7 +53,7 @@ impl Backend {
input: &PyBytes,
) -> PyResult<PyObject> {
let in_bytes = input.as_bytes();
py.allow_threads(|| self.backend.run_method(service, method, in_bytes))
py.allow_threads(|| self.backend.run_service_method(service, method, in_bytes))
.map(|out_bytes| {
let out_obj = PyBytes::new(py, &out_bytes);
out_obj.into()

View file

@ -19,17 +19,16 @@ name = "benchmark"
harness = false
required-features = ["bench"]
# After updating anything below, run ../cargo/update_licenses.sh
[build-dependencies]
anki_io = { version = "0.0.0", path = "io" }
anki_proto = { version = "0.0.0", path = "proto" }
anyhow = "1.0.71"
inflections = "1.1.1"
prettyplease = "0.2.7"
prost = "0.11.8"
prost-build = "0.11.8"
prost-reflect = "0.11.4"
prost-types = "0.11.9"
regex = "1.7.3"
which = "4.4.0"
syn = { version = "2.0.18", features = ["parsing", "printing"] }
[dev-dependencies]
async-stream = "0.3.4"
@ -45,6 +44,7 @@ features = ["json", "socks", "stream", "multipart"]
anki_i18n = { path = "i18n" }
anki_io = { path = "io" }
anki_proto = { path = "proto" }
workspace-hack = { version = "0.1", path = "../tools/workspace-hack" }
csv = { git = "https://github.com/ankitects/rust-csv.git", rev = "1c9d3aab6f79a7d815c69f925a46a4590c115f90" }
percent-encoding-iri = { git = "https://github.com/ankitects/rust-url.git", rev = "bb930b8d089f4d30d7d19c12e54e66191de47b88" }
@ -80,6 +80,7 @@ nom = "7.1.3"
num_cpus = "1.15.0"
num_enum = "0.6.1"
once_cell = "1.17.1"
phf = "0.11.1"
pin-project = "1.0.12"
prost = "0.11.8"
pulldown-cmark = "0.9.2"
@ -105,7 +106,6 @@ tracing-subscriber = { version = "0.3.16", features = ["fmt", "env-filter"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.22"
utime = "0.3.1"
workspace-hack = { version = "0.1", path = "../tools/workspace-hack" }
zip = { version = "0.6.4", default-features = false, features = ["deflate", "time"] }
zstd = { version = "0.12.3", features = ["zstdmt"] }

View file

@ -1,10 +1,23 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::fs;
mod rust_interface;
fn main() {
use std::env;
use std::fs;
use std::path::PathBuf;
use anyhow::Result;
use prost_reflect::DescriptorPool;
fn main() -> Result<()> {
println!("cargo:rerun-if-changed=../out/buildhash");
let buildhash = fs::read_to_string("../out/buildhash").unwrap_or_default();
println!("cargo:rustc-env=BUILDHASH={buildhash}")
println!("cargo:rustc-env=BUILDHASH={buildhash}");
let descriptors_path = env::var("DESCRIPTORS_BIN").ok().map(PathBuf::from).unwrap();
println!("cargo:rerun-if-changed={}", descriptors_path.display());
let pool = DescriptorPool::decode(std::fs::read(descriptors_path)?.as_ref())?;
rust_interface::write_rust_interface(&pool)?;
Ok(())
}

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub mod python;
pub mod rust;
pub mod rust_protos;
pub mod ts;
pub mod utils;
@ -14,7 +14,7 @@ use anyhow::Result;
fn main() -> Result<()> {
let descriptors_path = env::var("DESCRIPTORS_BIN").ok().map(PathBuf::from);
let pool = rust::write_backend_proto_rs(descriptors_path)?;
let pool = rust_protos::write_rust_protos(descriptors_path)?;
python::write_python_interface(&pool)?;
ts::write_ts_interface(&pool)?;

View file

@ -1,204 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use std::fmt::Write;
use std::path::Path;
use std::path::PathBuf;
use anki_io::create_dir_all;
use anki_io::read_file;
use anki_io::write_file_if_changed;
use anyhow::Context;
use anyhow::Result;
use itertools::Itertools;
use prost_build::ServiceGenerator;
use prost_reflect::DescriptorPool;
pub fn write_backend_proto_rs(descriptors_path: Option<PathBuf>) -> Result<DescriptorPool> {
set_protoc_path();
let proto_dir = PathBuf::from("../../proto");
let paths = gather_proto_paths(&proto_dir)?;
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let tmp_descriptors = out_dir.join("descriptors.tmp");
prost_build::Config::new()
.out_dir(&out_dir)
.file_descriptor_set_path(&tmp_descriptors)
.service_generator(RustCodeGenerator::boxed())
.type_attribute(
"Deck.Filtered.SearchTerm.Order",
"#[derive(strum::EnumIter)]",
)
.type_attribute(
"Deck.Normal.DayLimit",
"#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]",
)
.type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]")
.type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]")
.type_attribute(
"Preferences.BackupLimits",
"#[derive(Copy, serde::Deserialize, serde::Serialize)]",
)
.type_attribute(
"CsvMetadata.DupeResolution",
"#[derive(serde::Deserialize, serde::Serialize)]",
)
.type_attribute(
"CsvMetadata.MatchScope",
"#[derive(serde::Deserialize, serde::Serialize)]",
)
.compile_protos(paths.as_slice(), &[proto_dir])
.context("prost build")?;
let descriptors = read_file(&tmp_descriptors)?;
if let Some(descriptors_path) = descriptors_path {
create_dir_all(
descriptors_path
.parent()
.context("missing parent of descriptor")?,
)?;
write_file_if_changed(descriptors_path, &descriptors)?;
}
write_service_index(&out_dir, descriptors)
}
fn write_service_index(out_dir: &Path, descriptors: Vec<u8>) -> Result<DescriptorPool> {
let pool =
DescriptorPool::decode(descriptors.as_ref()).context("unable to decode descriptors")?;
let mut buf = String::new();
writeln!(
buf,
"#[derive(num_enum::TryFromPrimitive)]
#[repr(u32)]
pub enum ServiceIndex {{"
)
.unwrap();
for service in pool.services() {
writeln!(
buf,
" {} = {},",
service.name().replace("Service", ""),
service.index()
)
.unwrap();
}
writeln!(buf, "}}").unwrap();
write_file_if_changed(out_dir.join("service_index.rs"), buf)?;
Ok(pool)
}
fn gather_proto_paths(proto_dir: &Path) -> Result<Vec<PathBuf>> {
let subfolders = &["anki"];
let mut paths = vec![];
for subfolder in subfolders {
for entry in proto_dir.join(subfolder).read_dir().unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if path
.file_name()
.unwrap()
.to_str()
.unwrap()
.ends_with(".proto")
{
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
paths.push(path);
}
}
}
paths.sort();
Ok(paths)
}
struct RustCodeGenerator {}
impl RustCodeGenerator {
fn boxed() -> Box<dyn ServiceGenerator> {
Box::new(Self {})
}
fn write_method_trait(&mut self, buf: &mut String, service: &prost_build::Service) {
buf.push_str(
r#"
pub trait Service {
type Error: From<crate::ProtoError>;
fn run_method(&self, method: u32, input: &[u8]) -> Result<Vec<u8>, Self::Error> {
match method {
"#,
);
for (idx, method) in service.methods.iter().enumerate() {
write!(
buf,
concat!(" ",
"{idx} => {{ let input = super::{input_type}::decode(input).map_err(crate::ProtoError::from)?;\n",
"let output = self.{rust_method}(input)?;\n",
"let mut out_bytes = Vec::new(); output.encode(&mut out_bytes).map_err(crate::ProtoError::from)?; Ok(out_bytes) }}, "),
idx = idx,
input_type = method.input_type,
rust_method = method.name
)
.unwrap();
}
buf.push_str(
r#"
_ => Err(crate::ProtoError::InvalidMethodIndex.into()),
}
}
"#,
);
for method in &service.methods {
let comments = method
.comments
.leading
.iter()
.map(|c| format!(" /// {c}"))
.join("\n");
write!(
buf,
concat!(
"{comments}\n",
"fn {method_name}(&self, input: super::{input_type}) -> ",
"Result<super::{output_type}, Self::Error>;\n"
),
comments = comments,
method_name = method.name,
input_type = method.input_type,
output_type = method.output_type
)
.unwrap();
}
buf.push_str("}\n");
}
}
impl ServiceGenerator for RustCodeGenerator {
fn generate(&mut self, service: prost_build::Service, buf: &mut String) {
write!(
buf,
"pub mod {name}_service {{
use prost::Message;
",
name = service.name.replace("Service", "").to_ascii_lowercase()
)
.unwrap();
self.write_method_trait(buf, &service);
buf.push('}');
}
}
/// Set PROTOC to the custom path provided by PROTOC_BINARY, or add .exe to
/// the standard path if on Windows.
fn set_protoc_path() {
if let Ok(custom_protoc) = env::var("PROTOC_BINARY") {
env::set_var("PROTOC", custom_protoc);
} else if let Ok(bundled_protoc) = env::var("PROTOC") {
if cfg!(windows) && !bundled_protoc.ends_with(".exe") {
env::set_var("PROTOC", format!("{bundled_protoc}.exe"));
}
}
}

View file

@ -0,0 +1,95 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use std::path::Path;
use std::path::PathBuf;
use anki_io::create_dir_all;
use anki_io::read_file;
use anki_io::write_file_if_changed;
use anyhow::Context;
use anyhow::Result;
use prost_reflect::DescriptorPool;
/// Compile the .proto files with prost, attaching the extra derives our Rust
/// code relies on, and return the decoded descriptor pool.
///
/// When `descriptors_path` is provided, the serialized descriptor set is also
/// written there, for consumption by other build steps (e.g. rslib's build.rs
/// reads it via the DESCRIPTORS_BIN env var — TODO confirm all consumers).
pub fn write_rust_protos(descriptors_path: Option<PathBuf>) -> Result<DescriptorPool> {
    set_protoc_path();
    let proto_dir = PathBuf::from("../../proto");
    let paths = gather_proto_paths(&proto_dir)?;
    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    // prost writes the descriptor set here; we read it back below.
    let tmp_descriptors = out_dir.join("descriptors.tmp");
    prost_build::Config::new()
        .out_dir(&out_dir)
        .file_descriptor_set_path(&tmp_descriptors)
        // Extra derives required by consumers of the generated types.
        .type_attribute(
            "Deck.Filtered.SearchTerm.Order",
            "#[derive(strum::EnumIter)]",
        )
        .type_attribute(
            "Deck.Normal.DayLimit",
            "#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]",
        )
        .type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]")
        .type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]")
        .type_attribute(
            "Preferences.BackupLimits",
            "#[derive(Copy, serde::Deserialize, serde::Serialize)]",
        )
        .type_attribute(
            "CsvMetadata.DupeResolution",
            "#[derive(serde::Deserialize, serde::Serialize)]",
        )
        .type_attribute(
            "CsvMetadata.MatchScope",
            "#[derive(serde::Deserialize, serde::Serialize)]",
        )
        .compile_protos(paths.as_slice(), &[proto_dir])
        .context("prost build")?;

    let descriptors = read_file(&tmp_descriptors)?;
    if let Some(descriptors_path) = descriptors_path {
        create_dir_all(
            descriptors_path
                .parent()
                .context("missing parent of descriptor")?,
        )?;
        // Only touch the file when contents changed, to avoid spurious
        // downstream rebuilds.
        write_file_if_changed(descriptors_path, &descriptors)?;
    }
    let pool = DescriptorPool::decode(descriptors.as_ref())?;
    Ok(pool)
}
/// Collect every .proto file under the service subfolders of `proto_dir`,
/// registering each with cargo so the build script reruns when one changes.
///
/// Returns the paths sorted, so downstream codegen output is deterministic.
/// Errors (rather than panicking) if a directory cannot be read.
fn gather_proto_paths(proto_dir: &Path) -> Result<Vec<PathBuf>> {
    let subfolders = &["anki"];
    let mut paths = vec![];
    for subfolder in subfolders {
        for entry in proto_dir.join(subfolder).read_dir()? {
            let path = entry?.path();
            // Only .proto files participate; ignore anything else that may
            // be sitting in the folder.
            if path.extension().map_or(false, |ext| ext == "proto") {
                println!("cargo:rerun-if-changed={}", path.display());
                paths.push(path);
            }
        }
    }
    paths.sort();
    Ok(paths)
}
/// Set PROTOC to the custom path provided by PROTOC_BINARY, or add .exe to
/// the standard path if on Windows.
/// Point PROTOC at the custom binary named by PROTOC_BINARY when provided;
/// otherwise, on Windows, append .exe to an already-set PROTOC path that
/// lacks it. Does nothing when neither variable is set.
fn set_protoc_path() {
    match env::var("PROTOC_BINARY") {
        Ok(custom) => env::set_var("PROTOC", custom),
        Err(_) => {
            if let Ok(existing) = env::var("PROTOC") {
                if cfg!(windows) && !existing.ends_with(".exe") {
                    env::set_var("PROTOC", format!("{existing}.exe"));
                }
            }
        }
    }
}

View file

@ -48,9 +48,3 @@ impl From<usize> for crate::generic::UInt32 {
crate::generic::UInt32 { val: val as u32 }
}
}
impl From<()> for crate::generic::Empty {
fn from(_val: ()) -> Self {
crate::generic::Empty {}
}
}

View file

@ -11,27 +11,11 @@ macro_rules! protobuf {
};
}
use snafu::Snafu;
#[derive(Debug, Snafu)]
pub enum ProtoError {
InvalidMethodIndex,
#[snafu(context(false))]
DecodeError {
source: prost::DecodeError,
},
#[snafu(context(false))]
EncodeError {
source: prost::EncodeError,
},
}
include!(concat!(env!("OUT_DIR"), "/service_index.rs"));
protobuf!(ankidroid, "ankidroid");
protobuf!(backend, "backend");
protobuf!(card_rendering, "card_rendering");
protobuf!(cards, "cards");
protobuf!(codegen, "codegen");
protobuf!(collection, "collection");
protobuf!(config, "config");
protobuf!(deckconfig, "deckconfig");

319
rslib/rust_interface.rs Normal file
View file

@ -0,0 +1,319 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use std::fmt::Write;
use std::path::PathBuf;
use anki_io::write_file_if_changed;
use anki_proto::codegen::RustMethods;
use anyhow::Context;
use anyhow::Result;
use inflections::Inflect;
use prost_reflect::DescriptorPool;
/// Generate the Rust service scaffolding (Collection/Backend traits,
/// delegating impls, and run-method dispatch) from the proto descriptor
/// pool, and write it to $OUT_DIR/backend.rs for inclusion by rslib.
pub fn write_rust_interface(pool: &DescriptorPool) -> Result<()> {
    let mut buf = String::new();
    // Imports shared by all of the generated code appended below.
    buf.push_str("use crate::error::Result; use prost::Message;");
    let services = pool
        .services()
        .map(RustService::from_proto)
        .collect::<Vec<_>>();
    for service in &services {
        render_service(service, &mut buf);
    }
    // One top-level dispatcher for Backend (&self), one for Collection
    // (&mut self).
    render_top_level_run_method(&mut buf, &services, true);
    render_top_level_run_method(&mut buf, &services, false);
    render_method_lookup(&mut buf, &services);
    // println!("{}", &buf);
    // Pretty-print so the generated file (and errors pointing into it) are
    // readable.
    let buf = format_code(buf)?;
    // write into OUT_DIR so we can use it in build.rs
    let out_dir = env::var("OUT_DIR").unwrap();
    let path = PathBuf::from(out_dir).join("backend.rs");
    write_file_if_changed(path, buf).context("write file")?;
    Ok(())
}
/// A proto service in a form convenient for rendering Rust code.
#[derive(Debug)]
struct RustService {
    // Service name as declared in the proto (e.g. "DecksService").
    name: String,
    methods: Vec<RustMethod>,
}
/// A single RPC method, with `anki.generic.Empty` inputs/outputs already
/// elided to `None` so generated signatures can drop the argument or
/// return `()`.
#[derive(Debug)]
struct RustMethod {
    // snake_case method name.
    name: String,
    // Rust path of the input type; None when the input is generic::Empty.
    input_type: Option<String>,
    // Rust path of the output type; None when the output is generic::Empty.
    output_type: Option<String>,
    // Decoded codegen options controlling where the method is implemented.
    options: anki_proto::codegen::MethodOptions,
}
impl RustMethod {
    /// Renders `text` with the input type, or nothing when the input is
    /// generic::Empty.
    fn text_if_input_not_empty(&self, text: impl Fn(&String) -> String) -> String {
        match self.input_type.as_ref() {
            Some(ty) => text(ty),
            None => String::new(),
        }
    }
    /// `input: Type` for use in an argument list; empty when the input is
    /// generic::Empty.
    fn get_input_arg_with_label(&self) -> String {
        match self.input_type.as_ref() {
            Some(ty) => format!("input: {ty}"),
            None => String::new(),
        }
    }
    /// The Rust return type; `()` when the output is generic::Empty.
    fn get_output_type(&self) -> String {
        match self.output_type.as_deref() {
            Some(ty) => ty.to_string(),
            None => "()".to_string(),
        }
    }
    /// Renders `text` with the output type, or nothing when the output is
    /// generic::Empty.
    fn text_if_output_not_empty(&self, text: impl Fn(&String) -> String) -> String {
        match self.output_type.as_ref() {
            Some(ty) => text(ty),
            None => String::new(),
        }
    }
    /// True unless the backend method can be generated automatically by
    /// delegating to the collection implementation.
    fn wants_abstract_backend_method(&self) -> bool {
        !matches!(
            self.options.rust_methods(),
            RustMethods::CollectionAndAutoBackend
        )
    }
    /// True unless the method is backend-only.
    fn wants_abstract_collection_method(&self) -> bool {
        !matches!(self.options.rust_methods(), RustMethods::BackendOnly)
    }
}
impl RustMethod {
    /// Build a RustMethod from the descriptor-pool view of an RPC.
    fn from_proto(method: prost_reflect::MethodDescriptor) -> Self {
        RustMethod {
            // e.g. GetDeck -> get_deck
            name: method.name().to_snake_case(),
            // None when the type is anki.generic.Empty.
            input_type: rust_type(method.input().full_name()),
            output_type: rust_type(method.output().full_name()),
            // Recover our custom codegen option by transcoding into
            // codegen::MethodOptions, which mirrors the extension's field
            // number.
            options: method.options().transcode_to().unwrap(),
        }
    }
}
impl RustService {
    /// Build a RustService (name plus converted methods) from the
    /// descriptor-pool view of a proto service.
    fn from_proto(service: prost_reflect::ServiceDescriptor) -> Self {
        RustService {
            name: service.name().to_string(),
            methods: service.methods().map(RustMethod::from_proto).collect(),
        }
    }
}
/// Map a fully-qualified proto type name (e.g. "anki.decks.DeckId") to the
/// Rust path of the generated type (e.g. "anki_proto::decks::DeckId").
///
/// Returns None for `anki.generic.Empty`, which is elided from method
/// signatures entirely.
///
/// Panics (with the offending name) if the name contains no dot, which
/// would indicate a malformed descriptor.
fn rust_type(name: &str) -> Option<String> {
    if name == "anki.generic.Empty" {
        return None;
    }
    let (head, tail) = name
        .rsplit_once('.')
        .unwrap_or_else(|| panic!("expected fully-qualified proto name, got '{name}'"));
    Some(format!(
        "{}::{}",
        head.to_snake_case()
            .replace('.', "::")
            .replace("anki::", "anki_proto::"),
        tail
    ))
}
/// Parse the generated code and pretty-print it with prettyplease, so the
/// emitted file is readable. Fails if the generated code is not valid Rust.
fn format_code(code: String) -> Result<String> {
    let syntax_tree = syn::parse_file(&code)?;
    Ok(prettyplease::unparse(&syntax_tree))
}
/// Emit the trait signature for a collection-level method:
/// `fn name(&mut self, input: In) -> Result<Out>;`
fn render_abstract_collection_method(method: &RustMethod, buf: &mut String) {
    let args = method.get_input_arg_with_label();
    let ret = method.get_output_type();
    writeln!(buf, "fn {}(&mut self, {args}) -> Result<{ret}>;", method.name).unwrap();
}
/// Emit the trait signature for a backend-level method:
/// `fn name(&self, input: In) -> Result<Out>;`
/// `_service` is unused, but kept for signature parity with the other
/// render helpers.
fn render_abstract_backend_method(method: &RustMethod, buf: &mut String, _service: &RustService) {
    let args = method.get_input_arg_with_label();
    let ret = method.get_output_type();
    writeln!(buf, "fn {}(&self, {args}) -> Result<{ret}>;", method.name).unwrap();
}
/// Emit a Backend method that acquires the open collection via with_col()
/// and forwards to the Collection trait's implementation of the same method.
fn render_delegating_backend_method(method: &RustMethod, buf: &mut String, service: &RustService) {
    let method_name = &method.name;
    let input_with_label = method.get_input_arg_with_label();
    // Forward `input` only when the method takes one.
    let input = method.text_if_input_not_empty(|_| "input".into());
    let output_type = method.get_output_type();
    // Call through the service trait name, so the generated call resolves to
    // the Collection trait method rather than any inherent method.
    let col_service = &service.name;
    writeln!(
        buf,
        "fn {method_name}(&self, {input_with_label}) -> Result<{output_type}> {{
        self.with_col(|col| {col_service}::{method_name}(col, {input})) }}",
    )
    .unwrap();
}
fn render_service(service: &RustService, buf: &mut String) {
let have_collection = service
.methods
.iter()
.any(|m| m.wants_abstract_collection_method());
if have_collection {
render_collection_trait(service, buf);
}
if service
.methods
.iter()
.any(|m| m.wants_abstract_backend_method())
{
render_backend_trait(service, buf);
}
render_delegating_backend_methods(service, buf);
render_individual_service_run_method(buf, service, true);
render_individual_service_run_method(buf, service, false);
}
/// Emits `pub trait <Service> { ... }` containing the abstract signatures of
/// every collection-level method.
fn render_collection_trait(service: &RustService, buf: &mut String) {
    writeln!(buf, "pub trait {} {{", service.name).unwrap();
    for method in service
        .methods
        .iter()
        .filter(|m| m.wants_abstract_collection_method())
    {
        render_abstract_collection_method(method, buf);
    }
    buf.push('}');
}
/// Emits `pub trait Backend<Service> { ... }` containing the abstract
/// signatures of every backend-level method.
fn render_backend_trait(service: &RustService, buf: &mut String) {
    writeln!(buf, "pub trait Backend{} {{", service.name).unwrap();
    for method in service
        .methods
        .iter()
        .filter(|m| m.wants_abstract_backend_method())
    {
        render_abstract_backend_method(method, buf, service);
    }
    buf.push('}');
}
/// Emits an `impl crate::backend::Backend` block with a delegating wrapper
/// for every method that is not implemented directly on the backend.
fn render_delegating_backend_methods(service: &RustService, buf: &mut String) {
    buf.push_str("impl crate::backend::Backend {");
    for method in service
        .methods
        .iter()
        .filter(|m| !m.wants_abstract_backend_method())
    {
        render_delegating_backend_method(method, buf, service);
    }
    buf.push('}');
}
// Matches all service types and delegates to the relevant self.run_foo_method()
fn render_top_level_run_method(buf: &mut String, services: &[RustService], backend: bool) {
    // Backend dispatch only needs a shared borrow; Collection needs &mut.
    let (self_kind, struct_to_impl) = if backend {
        ("&self", "crate::backend::Backend")
    } else {
        ("&mut self", "crate::collection::Collection")
    };
    writeln!(buf,
        r#" impl {struct_to_impl} {{
pub fn run_service_method({self_kind}, service: u32, method: u32, input: &[u8]) -> Result<Vec<u8>, Vec<u8>> {{
match service {{
"#,
    ).unwrap();
    // One match arm per service, keyed by its position in the service list.
    for (idx, service) in services.iter().enumerate() {
        let service = service.name.to_snake_case();
        writeln!(buf, "{idx} => self.run_{service}_method(method, input),").unwrap();
    }
    // Unknown index is an error; any failure is encoded as protobuf bytes.
    buf.push_str(
        r#"
_ => Err(crate::error::AnkiError::InvalidServiceIndex),
}
.map_err(|err| {
let backend_err = err.into_protobuf(&self.tr);
let mut bytes = Vec::new();
backend_err.encode(&mut bytes).unwrap();
bytes
})
} }"#,
    );
}
// Renders `run_<service>_method(method, input)` for one service, dispatching
// on the numeric method index. Called twice per service: once for the Backend
// impl (covers all methods) and once for the Collection impl (covers only
// methods with a collection trait method).
fn render_individual_service_run_method(buf: &mut String, service: &RustService, backend: bool) {
    // Collection methods mutate state, so that impl takes &mut self.
    let self_kind = if backend { "&self" } else { "&mut self" };
    let struct_to_impl = if backend {
        "crate::backend::Backend"
    } else {
        "crate::collection::Collection"
    };
    // Backend methods are invoked via the Backend type itself; collection
    // methods via the service trait name.
    let method_qualifier = if backend {
        struct_to_impl
    } else {
        &service.name
    };
    let service_name = &service.name.to_snake_case();
    writeln!(
        buf,
        "#[allow(unused_variables, clippy::match_single_binding)]
impl {struct_to_impl} {{ pub(crate) fn run_{service_name}_method({self_kind},
method: u32, input: &[u8]) -> Result<Vec<u8>> {{
match method {{",
    )
    .unwrap();
    for (idx, method) in service.methods.iter().enumerate() {
        // Skip backend-only methods when rendering the Collection impl; the
        // Backend impl handles every method.
        if !backend && !method.wants_abstract_collection_method() {
            continue;
        }
        // Empty inputs/outputs are elided: no decode for an empty input, and
        // an empty output is returned as a zero-length byte vector.
        let decode_input =
            method.text_if_input_not_empty(|kind| format!("let input = {kind}::decode(input)?;"));
        let rust_method = &method.name;
        let input = method.text_if_input_not_empty(|_| "input".into());
        let output_assign = method.text_if_output_not_empty(|_| "let output = ".into());
        let output = if method.output_type.is_none() {
            "Vec::new()"
        } else {
            "{ let mut out_bytes = Vec::new();
output.encode(&mut out_bytes)?;
out_bytes }"
        };
        writeln!(
            buf,
            "{idx} => {{ {decode_input}
{output_assign} {method_qualifier}::{rust_method}(self, {input})?;
Ok({output}) }},",
        )
        .unwrap();
    }
    // Out-of-range method indices are rejected rather than panicking.
    buf.push_str(
        r#"
_ => Err(crate::error::AnkiError::InvalidMethodIndex),
}
} }
"#,
    );
}
/// Emits a compile-time `phf` map from method name to its
/// (service index, method index) pair, so callers can resolve methods by
/// name without a runtime table build.
fn render_method_lookup(buf: &mut String, services: &[RustService]) {
    writeln!(
        buf,
        "
pub const METHODS_BY_NAME: phf::Map<&str, (u32, u32)> = phf::phf_map! {{
"
    )
    .unwrap();
    for (sidx, service) in services.iter().enumerate() {
        for (midx, method) in service.methods.iter().enumerate() {
            writeln!(buf, r#" "{name}" => ({sidx}, {midx}),"#, name = method.name).unwrap();
        }
    }
    buf.push_str("};\n");
}

View file

@ -284,8 +284,8 @@ mod tests {
use anki_proto::ankidroid::SqlValue;
use super::*;
use crate::backend::ankidroid::db::select_slice_of_size;
use crate::backend::ankidroid::db::Sizable;
use crate::ankidroid::db::select_slice_of_size;
use crate::ankidroid::db::Sizable;
fn gen_data() -> Vec<SqlValue> {
vec![

View file

@ -13,7 +13,7 @@ use crate::error::SyncError;
use crate::error::SyncErrorKind as Sync;
use crate::prelude::AnkiError;
pub(super) fn debug_produce_error(s: &str) -> AnkiError {
pub(crate) fn debug_produce_error(s: &str) -> AnkiError {
let info = "error_value".to_string();
match s {
"TemplateError" => AnkiError::TemplateError { info },

View file

@ -0,0 +1,5 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(crate) mod db;
pub(crate) mod error;
pub mod service;

View file

@ -0,0 +1,70 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::ankidroid::DbResponse;
use anki_proto::ankidroid::GetActiveSequenceNumbersResponse;
use anki_proto::ankidroid::GetNextResultPageRequest;
use anki_proto::generic;
use crate::ankidroid::db;
use crate::ankidroid::db::active_sequences;
use crate::ankidroid::db::execute_for_row_count;
use crate::ankidroid::db::insert_for_id;
use crate::backend::dbproxy::db_command_bytes;
use crate::backend::dbproxy::db_command_proto;
use crate::collection::Collection;
use crate::error;
use crate::error::OrInvalid;
impl crate::services::AnkidroidService for Collection {
    /// Runs a JSON-encoded DB command and returns the raw JSON result.
    fn run_db_command(&mut self, input: generic::Json) -> error::Result<generic::Json> {
        let json = db_command_bytes(self, &input.json)?;
        Ok(generic::Json { json })
    }
    /// As run_db_command, but returns a protobuf DbResponse.
    fn run_db_command_proto(&mut self, input: generic::Json) -> error::Result<DbResponse> {
        db_command_proto(self, &input.json)
    }
    /// Executes a DB command and returns the affected row count.
    fn run_db_command_for_row_count(
        &mut self,
        input: generic::Json,
    ) -> error::Result<generic::Int64> {
        let val = execute_for_row_count(self, &input.json)?;
        Ok(generic::Int64 { val })
    }
    fn flush_all_queries(&mut self) -> error::Result<()> {
        db::flush_collection(self);
        Ok(())
    }
    fn flush_query(&mut self, input: generic::Int32) -> error::Result<()> {
        db::flush_single_result(self, input.val);
        Ok(())
    }
    /// Fetches the next cached page of a paginated query result.
    fn get_next_result_page(
        &mut self,
        input: GetNextResultPageRequest,
    ) -> error::Result<DbResponse> {
        db::get_next(self, input.sequence, input.index).or_invalid("missing result page")
    }
    fn insert_for_id(&mut self, input: generic::Json) -> error::Result<generic::Int64> {
        insert_for_id(self, &input.json).map(Into::into)
    }
    /// Prepares the given SQL and reports the column names it would return.
    fn get_column_names_from_query(
        &mut self,
        input: generic::String,
    ) -> error::Result<generic::StringList> {
        let stmt = self.storage.db.prepare(&input.val)?;
        let names: Vec<String> = stmt
            .column_names()
            .iter()
            .map(ToString::to_string)
            .collect();
        Ok(names.into())
    }
    fn get_active_sequence_numbers(&mut self) -> error::Result<GetActiveSequenceNumbersResponse> {
        let numbers = active_sequences(self);
        Ok(GetActiveSequenceNumbersResponse { numbers })
    }
}

View file

@ -0,0 +1,61 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
use super::Backend;
use crate::ankidroid::db;
use crate::ankidroid::error::debug_produce_error;
use crate::prelude::*;
use crate::scheduler::timing;
use crate::scheduler::timing::fixed_offset_from_minutes;
use crate::services::BackendAnkidroidService;
impl BackendAnkidroidService for Backend {
    /// Legacy scheduling-time computation for AnkiDroid; does not require an
    /// open collection.
    fn sched_timing_today_legacy(
        &self,
        input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest,
    ) -> Result<anki_proto::scheduler::SchedTimingTodayResponse> {
        timing::sched_timing_today(
            TimestampSecs::from(input.created_secs),
            TimestampSecs::from(input.now_secs),
            input.created_mins_west.map(fixed_offset_from_minutes),
            fixed_offset_from_minutes(input.now_mins_west),
            Some(input.rollover_hour as u8),
        )
        .map(Into::into)
    }
    fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result<generic::Int32> {
        let val = timing::local_minutes_west_for_stamp(input.val.into())?;
        Ok(generic::Int32 { val })
    }
    fn set_page_size(&self, input: generic::Int64) -> Result<()> {
        // we don't require an open collection, but should avoid modifying this
        // concurrently
        let _guard = self.col.lock();
        db::set_max_page_size(input.val as usize);
        Ok(())
    }
    /// Test helper: always returns the error named by the input string.
    fn debug_produce_error(&self, input: generic::String) -> Result<()> {
        Err(debug_produce_error(&input.val))
    }
}
impl From<crate::scheduler::timing::SchedTimingToday>
    for anki_proto::scheduler::SchedTimingTodayResponse
{
    /// Converts the internal timing struct into its protobuf counterpart.
    fn from(t: crate::scheduler::timing::SchedTimingToday) -> Self {
        Self {
            days_elapsed: t.days_elapsed,
            next_day_at: t.next_day_at.0,
        }
    }
}

View file

@ -1,131 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(crate) mod db;
pub(crate) mod error;
pub(super) use anki_proto::ankidroid::ankidroid_service::Service as AnkidroidService;
use anki_proto::ankidroid::DbResponse;
use anki_proto::ankidroid::GetActiveSequenceNumbersResponse;
use anki_proto::ankidroid::GetNextResultPageRequest;
use anki_proto::generic;
use self::db::active_sequences;
use self::error::debug_produce_error;
use super::dbproxy::db_command_bytes;
use super::dbproxy::db_command_proto;
use super::Backend;
use crate::backend::ankidroid::db::execute_for_row_count;
use crate::backend::ankidroid::db::insert_for_id;
use crate::prelude::*;
use crate::scheduler::timing;
use crate::scheduler::timing::fixed_offset_from_minutes;
impl AnkidroidService for Backend {
type Error = AnkiError;
fn sched_timing_today_legacy(
&self,
input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest,
) -> Result<anki_proto::scheduler::SchedTimingTodayResponse> {
let result = timing::sched_timing_today(
TimestampSecs::from(input.created_secs),
TimestampSecs::from(input.now_secs),
input.created_mins_west.map(fixed_offset_from_minutes),
fixed_offset_from_minutes(input.now_mins_west),
Some(input.rollover_hour as u8),
)?;
Ok(anki_proto::scheduler::SchedTimingTodayResponse::from(
result,
))
}
fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result<generic::Int32> {
Ok(generic::Int32 {
val: timing::local_minutes_west_for_stamp(input.val.into())?,
})
}
fn run_db_command(&self, input: generic::Json) -> Result<generic::Json> {
self.with_col(|col| db_command_bytes(col, &input.json))
.map(|json| generic::Json { json })
}
fn run_db_command_proto(&self, input: generic::Json) -> Result<DbResponse> {
self.with_col(|col| db_command_proto(col, &input.json))
}
fn run_db_command_for_row_count(&self, input: generic::Json) -> Result<generic::Int64> {
self.with_col(|col| execute_for_row_count(col, &input.json))
.map(|val| generic::Int64 { val })
}
fn flush_all_queries(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| {
db::flush_collection(col);
Ok(generic::Empty {})
})
}
fn flush_query(&self, input: generic::Int32) -> Result<generic::Empty> {
self.with_col(|col| {
db::flush_single_result(col, input.val);
Ok(generic::Empty {})
})
}
fn get_next_result_page(&self, input: GetNextResultPageRequest) -> Result<DbResponse> {
self.with_col(|col| {
db::get_next(col, input.sequence, input.index).or_invalid("missing result page")
})
}
fn insert_for_id(&self, input: generic::Json) -> Result<generic::Int64> {
self.with_col(|col| insert_for_id(col, &input.json).map(Into::into))
}
fn set_page_size(&self, input: generic::Int64) -> Result<generic::Empty> {
// we don't require an open collection, but should avoid modifying this
// concurrently
let _guard = self.col.lock();
db::set_max_page_size(input.val as usize);
Ok(().into())
}
fn get_column_names_from_query(&self, input: generic::String) -> Result<generic::StringList> {
self.with_col(|col| {
let stmt = col.storage.db.prepare(&input.val)?;
let names = stmt.column_names();
let names: Vec<_> = names.iter().map(ToString::to_string).collect();
Ok(names.into())
})
}
fn get_active_sequence_numbers(
&self,
_input: generic::Empty,
) -> Result<GetActiveSequenceNumbersResponse> {
self.with_col(|col| {
Ok(GetActiveSequenceNumbersResponse {
numbers: active_sequences(col),
})
})
}
fn debug_produce_error(&self, input: generic::String) -> Result<generic::Empty> {
Err(debug_produce_error(&input.val))
}
}
impl From<crate::scheduler::timing::SchedTimingToday>
for anki_proto::scheduler::SchedTimingTodayResponse
{
fn from(
t: crate::scheduler::timing::SchedTimingToday,
) -> anki_proto::scheduler::SchedTimingTodayResponse {
anki_proto::scheduler::SchedTimingTodayResponse {
days_elapsed: t.days_elapsed,
next_day_at: t.next_day_at.0,
}
}
}

View file

@ -0,0 +1,16 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::card_rendering::StripHtmlRequest;
use crate::backend::Backend;
use crate::card_rendering::service::strip_html_proto;
use crate::services::BackendCardRenderingService;
impl BackendCardRenderingService for Backend {
    /// Delegates directly to `strip_html_proto`; needs no open collection.
    fn strip_html(
        &self,
        input: StripHtmlRequest,
    ) -> crate::error::Result<anki_proto::generic::String> {
        strip_html_proto(input)
    }
}

View file

@ -3,7 +3,6 @@
use std::sync::MutexGuard;
pub(super) use anki_proto::collection::collection_service::Service as CollectionService;
use anki_proto::generic;
use tracing::error;
@ -11,25 +10,11 @@ use super::Backend;
use crate::collection::CollectionBuilder;
use crate::prelude::*;
use crate::progress::progress_to_proto;
use crate::services::BackendCollectionService;
use crate::storage::SchemaVersion;
impl CollectionService for Backend {
type Error = AnkiError;
fn latest_progress(&self, _input: generic::Empty) -> Result<anki_proto::collection::Progress> {
let progress = self.progress_state.lock().unwrap().last_progress;
Ok(progress_to_proto(progress, &self.tr))
}
fn set_wants_abort(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.progress_state.lock().unwrap().want_abort = true;
Ok(().into())
}
fn open_collection(
&self,
input: anki_proto::collection::OpenCollectionRequest,
) -> Result<generic::Empty> {
impl BackendCollectionService for Backend {
fn open_collection(&self, input: anki_proto::collection::OpenCollectionRequest) -> Result<()> {
let mut guard = self.lock_closed_collection()?;
let mut builder = CollectionBuilder::new(input.collection_path);
@ -42,13 +27,13 @@ impl CollectionService for Backend {
*guard = Some(builder.build()?);
Ok(().into())
Ok(())
}
fn close_collection(
&self,
input: anki_proto::collection::CloseCollectionRequest,
) -> Result<generic::Empty> {
) -> Result<()> {
let desired_version = if input.downgrade_to_schema11 {
Some(SchemaVersion::V11)
} else {
@ -63,47 +48,7 @@ impl CollectionService for Backend {
error!(" failed: {:?}", e);
}
Ok(().into())
}
fn check_database(
&self,
_input: generic::Empty,
) -> Result<anki_proto::collection::CheckDatabaseResponse> {
self.with_col(|col| {
col.check_database()
.map(|problems| anki_proto::collection::CheckDatabaseResponse {
problems: problems.to_i18n_strings(&col.tr),
})
})
}
fn get_undo_status(
&self,
_input: generic::Empty,
) -> Result<anki_proto::collection::UndoStatus> {
self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr)))
}
fn undo(&self, _input: generic::Empty) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr)))
}
fn redo(&self, _input: generic::Empty) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr)))
}
fn add_custom_undo_entry(&self, input: generic::String) -> Result<generic::UInt32> {
self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into()))
}
fn merge_undo_entries(
&self,
input: generic::UInt32,
) -> Result<anki_proto::collection::OpChanges> {
let starting_from = input.val as usize;
self.with_col(|col| col.merge_undoable_ops(starting_from))
.map(Into::into)
Ok(())
}
fn create_backup(
@ -133,9 +78,54 @@ impl CollectionService for Backend {
Ok(created.into())
}
fn await_backup_completion(&self, _input: generic::Empty) -> Result<generic::Empty> {
fn await_backup_completion(&self) -> Result<()> {
self.await_backup_completion()?;
Ok(().into())
Ok(())
}
}
impl crate::services::CollectionService for Collection {
// Runs the integrity check and returns any problems as translated strings.
fn check_database(&mut self) -> Result<anki_proto::collection::CheckDatabaseResponse> {
{
self.check_database()
.map(|problems| anki_proto::collection::CheckDatabaseResponse {
problems: problems.to_i18n_strings(&self.tr),
})
}
}
fn get_undo_status(&mut self) -> Result<anki_proto::collection::UndoStatus> {
Ok(self.undo_status().into_protobuf(&self.tr))
}
fn undo(&mut self) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.undo().map(|out| out.into_protobuf(&self.tr))
}
fn redo(&mut self) -> Result<anki_proto::collection::OpChangesAfterUndo> {
self.redo().map(|out| out.into_protobuf(&self.tr))
}
// Registers a named undo step and returns its identifier.
fn add_custom_undo_entry(&mut self, input: generic::String) -> Result<generic::UInt32> {
Ok(self.add_custom_undo_step(input.val).into())
}
// Merges undoable ops from the given entry index onwards into one step.
fn merge_undo_entries(
&mut self,
input: generic::UInt32,
) -> Result<anki_proto::collection::OpChanges> {
let starting_from = input.val as usize;
self.merge_undoable_ops(starting_from).map(Into::into)
}
// Reports the most recent progress snapshot for UI polling.
fn latest_progress(&mut self) -> Result<anki_proto::collection::Progress> {
let progress = self.state.progress.lock().unwrap().last_progress;
Ok(progress_to_proto(progress, &self.tr))
}
// Flags the current long-running operation for cancellation.
fn set_wants_abort(&mut self) -> Result<()> {
self.state.progress.lock().unwrap().want_abort = true;
Ok(())
}
}

View file

@ -3,11 +3,9 @@
use anki_proto::config::config_key::Bool as BoolKeyProto;
use anki_proto::config::config_key::String as StringKeyProto;
pub(super) use anki_proto::config::config_service::Service as ConfigService;
use anki_proto::generic;
use serde_json::Value;
use super::Backend;
use crate::config::BoolKey;
use crate::config::StringKey;
use crate::prelude::*;
@ -53,100 +51,88 @@ impl From<StringKeyProto> for StringKey {
}
}
impl ConfigService for Backend {
type Error = AnkiError;
fn get_config_json(&self, input: generic::String) -> Result<generic::Json> {
self.with_col(|col| {
let val: Option<Value> = col.get_config_optional(input.val.as_str());
val.or_not_found(input.val)
.and_then(|v| serde_json::to_vec(&v).map_err(Into::into))
.map(Into::into)
})
}
fn set_config_json(
&self,
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.set_config_json(input.key.as_str(), &val, input.undoable)
})
.map(Into::into)
}
fn set_config_json_no_undo(
&self,
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<generic::Empty> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
})
.map(Into::into)
}
fn remove_config(&self, input: generic::String) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.remove_config(input.val.as_str()))
impl crate::services::ConfigService for Collection {
fn get_config_json(&mut self, input: generic::String) -> Result<generic::Json> {
let val: Option<Value> = self.get_config_optional(input.val.as_str());
val.or_not_found(input.val)
.and_then(|v| serde_json::to_vec(&v).map_err(Into::into))
.map(Into::into)
}
fn get_all_config(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let conf = col.storage.get_all_config()?;
serde_json::to_vec(&conf).map_err(Into::into)
})
.map(Into::into)
fn set_config_json(
&mut self,
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<anki_proto::collection::OpChanges> {
let val: Value = serde_json::from_slice(&input.value_json)?;
self.set_config_json(input.key.as_str(), &val, input.undoable)
.map(Into::into)
}
fn set_config_json_no_undo(
&mut self,
input: anki_proto::config::SetConfigJsonRequest,
) -> Result<()> {
let val: Value = serde_json::from_slice(&input.value_json)?;
self.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
.map(Into::into)
}
fn remove_config(
&mut self,
input: generic::String,
) -> Result<anki_proto::collection::OpChanges> {
self.remove_config(input.val.as_str()).map(Into::into)
}
fn get_all_config(&mut self) -> Result<generic::Json> {
let conf = self.storage.get_all_config()?;
serde_json::to_vec(&conf)
.map_err(Into::into)
.map(Into::into)
}
fn get_config_bool(
&self,
&mut self,
input: anki_proto::config::GetConfigBoolRequest,
) -> Result<generic::Bool> {
self.with_col(|col| {
Ok(generic::Bool {
val: col.get_config_bool(input.key().into()),
})
Ok(generic::Bool {
val: Collection::get_config_bool(self, input.key().into()),
})
}
fn set_config_bool(
&self,
&mut self,
input: anki_proto::config::SetConfigBoolRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable))
self.set_config_bool(input.key().into(), input.value, input.undoable)
.map(Into::into)
}
fn get_config_string(
&self,
&mut self,
input: anki_proto::config::GetConfigStringRequest,
) -> Result<generic::String> {
self.with_col(|col| {
Ok(generic::String {
val: col.get_config_string(input.key().into()),
})
Ok(generic::String {
val: Collection::get_config_string(self, input.key().into()),
})
}
fn set_config_string(
&self,
&mut self,
input: anki_proto::config::SetConfigStringRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable))
self.set_config_string(input.key().into(), &input.value, input.undoable)
.map(Into::into)
}
fn get_preferences(&self, _input: generic::Empty) -> Result<anki_proto::config::Preferences> {
self.with_col(|col| col.get_preferences())
fn get_preferences(&mut self) -> Result<anki_proto::config::Preferences> {
Collection::get_preferences(self)
}
fn set_preferences(
&self,
&mut self,
input: anki_proto::config::Preferences,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_preferences(input))
.map(Into::into)
self.set_preferences(input).map(Into::into)
}
}

View file

@ -15,6 +15,8 @@ use rusqlite::OptionalExtension;
use serde::Deserialize;
use serde::Serialize;
use crate::ankidroid::db::next_sequence_number;
use crate::ankidroid::db::trim_and_cache_remaining;
use crate::prelude::*;
use crate::storage::SqliteStorage;
@ -44,7 +46,7 @@ pub(super) enum DbResult {
#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub(super) enum SqlValue {
pub(crate) enum SqlValue {
Null,
String(String),
Int(i64),
@ -113,7 +115,7 @@ impl FromSql for SqlValue {
}
}
pub(super) fn db_command_bytes(col: &mut Collection, input: &[u8]) -> Result<Vec<u8>> {
pub(crate) fn db_command_bytes(col: &mut Collection, input: &[u8]) -> Result<Vec<u8>> {
serde_json::to_vec(&db_command_bytes_inner(col, input)?).map_err(Into::into)
}
@ -181,11 +183,7 @@ pub(crate) fn db_command_proto(col: &mut Collection, input: &[u8]) -> Result<DbR
DbResult::None => ProtoDbResult { rows: Vec::new() },
DbResult::Rows(rows) => rows_to_proto(&rows),
};
let trimmed = super::ankidroid::db::trim_and_cache_remaining(
col,
proto_resp,
super::ankidroid::db::next_sequence_number(),
);
let trimmed = trim_and_cache_remaining(col, proto_resp, next_sequence_number());
Ok(trimmed)
}

View file

@ -1,334 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::convert::TryFrom;
pub(super) use anki_proto::decks::decks_service::Service as DecksService;
use anki_proto::generic;
use super::Backend;
use crate::decks::filtered::search_order_labels;
use crate::decks::DeckSchema11;
use crate::prelude::*;
use crate::scheduler::filtered::FilteredDeckForUpdate;
impl DecksService for Backend {
type Error = AnkiError;
fn new_deck(&self, _input: generic::Empty) -> Result<anki_proto::decks::Deck> {
Ok(Deck::new_normal().into())
}
fn add_deck(
&self,
deck: anki_proto::decks::Deck,
) -> Result<anki_proto::collection::OpChangesWithId> {
let mut deck: Deck = deck.try_into()?;
self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into()))
}
fn add_deck_legacy(
&self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChangesWithId> {
let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck: Deck = schema11.into();
self.with_col(|col| {
let output = col.add_deck(&mut deck)?;
Ok(output.map(|_| deck.id.0).into())
})
}
fn add_or_update_deck_legacy(
&self,
input: anki_proto::decks::AddOrUpdateDeckLegacyRequest,
) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
let mut deck: Deck = schema11.into();
if input.preserve_usn_and_mtime {
col.transact_no_undo(|col| {
let usn = col.usn()?;
col.add_or_update_single_deck_with_existing_id(&mut deck, usn)
})?;
} else {
col.add_or_update_deck(&mut deck)?;
}
Ok(anki_proto::decks::DeckId { did: deck.id.0 })
})
}
fn deck_tree(
&self,
input: anki_proto::decks::DeckTreeRequest,
) -> Result<anki_proto::decks::DeckTreeNode> {
self.with_col(|col| {
let now = if input.now == 0 {
None
} else {
Some(TimestampSecs(input.now))
};
col.deck_tree(now)
})
}
fn deck_tree_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let tree = col.legacy_deck_tree()?;
serde_json::to_vec(&tree)
.map_err(Into::into)
.map(Into::into)
})
}
fn get_all_decks_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let decks = col.storage.get_all_decks_as_schema11()?;
serde_json::to_vec(&decks).map_err(Into::into)
})
.map(Into::into)
}
fn get_deck_id_by_name(&self, input: generic::String) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
col.get_deck_id(&input.val).and_then(|d| {
d.or_not_found(input.val)
.map(|d| anki_proto::decks::DeckId { did: d.0 })
})
})
}
fn get_deck(&self, input: anki_proto::decks::DeckId) -> Result<anki_proto::decks::Deck> {
let did = input.into();
self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into()))
}
fn update_deck(
&self,
input: anki_proto::decks::Deck,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let mut deck = Deck::try_from(input)?;
col.update_deck(&mut deck).map(Into::into)
})
}
fn update_deck_legacy(
&self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let deck: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck = deck.into();
col.update_deck(&mut deck).map(Into::into)
})
}
fn get_deck_legacy(&self, input: anki_proto::decks::DeckId) -> Result<generic::Json> {
let did = input.into();
self.with_col(|col| {
let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into();
serde_json::to_vec(&deck)
.map_err(Into::into)
.map(Into::into)
})
}
fn get_deck_names(
&self,
input: anki_proto::decks::GetDeckNamesRequest,
) -> Result<anki_proto::decks::DeckNames> {
self.with_col(|col| {
let names = if input.include_filtered {
col.get_all_deck_names(input.skip_empty_default)?
} else {
col.get_all_normal_deck_names()?
};
Ok(deck_names_to_proto(names))
})
}
fn get_deck_and_child_names(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::decks::DeckNames> {
self.with_col(|col| {
col.get_deck_and_child_names(input.did.into())
.map(deck_names_to_proto)
})
}
fn new_deck_legacy(&self, input: generic::Bool) -> Result<generic::Json> {
let deck = if input.val {
Deck::new_filtered()
} else {
Deck::new_normal()
};
let schema11: DeckSchema11 = deck.into();
serde_json::to_vec(&schema11)
.map_err(Into::into)
.map(Into::into)
}
fn remove_decks(
&self,
input: anki_proto::decks::DeckIds,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.remove_decks_and_child_decks(
&input.dids.into_iter().map(DeckId).collect::<Vec<_>>(),
)
})
.map(Into::into)
}
fn reparent_decks(
&self,
input: anki_proto::decks::ReparentDecksRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
let new_parent = if input.new_parent == 0 {
None
} else {
Some(input.new_parent.into())
};
self.with_col(|col| col.reparent_decks(&deck_ids, new_parent))
.map(Into::into)
}
fn rename_deck(
&self,
input: anki_proto::decks::RenameDeckRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name))
.map(Into::into)
}
fn get_or_create_filtered_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::decks::FilteredDeckForUpdate> {
self.with_col(|col| col.get_or_create_filtered_deck(input.into()))
.map(Into::into)
}
fn add_or_update_filtered_deck(
&self,
input: anki_proto::decks::FilteredDeckForUpdate,
) -> Result<anki_proto::collection::OpChangesWithId> {
self.with_col(|col| col.add_or_update_filtered_deck(input.into()))
.map(|out| out.map(i64::from))
.map(Into::into)
}
fn filtered_deck_order_labels(&self, _input: generic::Empty) -> Result<generic::StringList> {
Ok(search_order_labels(&self.tr).into())
}
fn set_deck_collapsed(
&self,
input: anki_proto::decks::SetDeckCollapsedRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
})
.map(Into::into)
}
fn set_current_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.set_current_deck(input.did.into()))
.map(Into::into)
}
fn get_current_deck(&self, _input: generic::Empty) -> Result<anki_proto::decks::Deck> {
self.with_col(|col| col.get_current_deck())
.map(|deck| (*deck).clone().into())
}
}
impl From<anki_proto::decks::DeckId> for DeckId {
fn from(did: anki_proto::decks::DeckId) -> Self {
DeckId(did.did)
}
}
impl From<DeckId> for anki_proto::decks::DeckId {
fn from(did: DeckId) -> Self {
anki_proto::decks::DeckId { did: did.0 }
}
}
impl From<FilteredDeckForUpdate> for anki_proto::decks::FilteredDeckForUpdate {
fn from(deck: FilteredDeckForUpdate) -> Self {
anki_proto::decks::FilteredDeckForUpdate {
id: deck.id.into(),
name: deck.human_name,
config: Some(deck.config),
}
}
}
impl From<anki_proto::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self {
FilteredDeckForUpdate {
id: deck.id.into(),
human_name: deck.name,
config: deck.config.unwrap_or_default(),
}
}
}
impl From<Deck> for anki_proto::decks::Deck {
fn from(d: Deck) -> Self {
anki_proto::decks::Deck {
id: d.id.0,
name: d.name.human_name(),
mtime_secs: d.mtime_secs.0,
usn: d.usn.0,
common: Some(d.common),
kind: Some(kind_from_inline(d.kind)),
}
}
}
impl TryFrom<anki_proto::decks::Deck> for Deck {
type Error = AnkiError;
fn try_from(d: anki_proto::decks::Deck) -> Result<Self, Self::Error> {
Ok(Deck {
id: DeckId(d.id),
name: NativeDeckName::from_human_name(&d.name),
mtime_secs: TimestampSecs(d.mtime_secs),
usn: Usn(d.usn),
common: d.common.unwrap_or_default(),
kind: kind_to_inline(d.kind.or_invalid("missing kind")?),
})
}
}
/// Unwrap the protobuf kind oneof into the native enum.
fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind {
    use anki_proto::decks::deck::Kind;
    match kind {
        Kind::Normal(normal) => DeckKind::Normal(normal),
        Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
    }
}
/// Wrap the native deck kind back into the protobuf oneof.
fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind {
    use anki_proto::decks::deck::Kind;
    match k {
        DeckKind::Normal(n) => Kind::Normal(n),
        DeckKind::Filtered(f) => Kind::Filtered(f),
    }
}
/// Build a protobuf id/name pair from a native tuple.
fn deck_name_to_proto((id, name): (DeckId, String)) -> anki_proto::decks::DeckNameId {
    anki_proto::decks::DeckNameId { name, id: id.0 }
}
/// Convert a list of id/name tuples into the protobuf list message.
fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames {
    let entries = names.into_iter().map(deck_name_to_proto).collect();
    anki_proto::decks::DeckNames { entries }
}

View file

@ -40,6 +40,7 @@ impl AnkiError {
AnkiError::FileIoError { .. } => Kind::IoError,
AnkiError::MediaCheckRequired => Kind::InvalidInput,
AnkiError::InvalidId => Kind::InvalidInput,
AnkiError::InvalidMethodIndex | AnkiError::InvalidServiceIndex => Kind::InvalidInput,
#[cfg(windows)]
AnkiError::WindowsError { .. } => Kind::OsError,
};

View file

@ -1,77 +1,27 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use anki_proto::generic;
pub(super) use anki_proto::i18n::i18n_service::Service as I18nService;
use fluent::FluentArgs;
use fluent::FluentValue;
use anki_proto::i18n::FormatTimespanRequest;
use anki_proto::i18n::I18nResourcesRequest;
use anki_proto::i18n::TranslateStringRequest;
use super::Backend;
use crate::i18n::service;
use crate::prelude::*;
use crate::scheduler::timespan::answer_button_time;
use crate::scheduler::timespan::time_span;
impl I18nService for Backend {
type Error = AnkiError;
fn translate_string(
&self,
input: anki_proto::i18n::TranslateStringRequest,
) -> Result<generic::String> {
let args = build_fluent_args(input.args);
Ok(self
.tr
.translate_via_index(
input.module_index as usize,
input.message_index as usize,
args,
)
.into())
// We avoid delegating to collection for these, as tr doesn't require a
// collection lock.
impl crate::services::BackendI18nService for Backend {
fn translate_string(&self, input: TranslateStringRequest) -> Result<generic::String> {
service::translate_string(&self.tr, input)
}
fn format_timespan(
&self,
input: anki_proto::i18n::FormatTimespanRequest,
) -> Result<generic::String> {
use anki_proto::i18n::format_timespan_request::Context;
Ok(match input.context() {
Context::Precise => time_span(input.seconds, &self.tr, true),
Context::Intervals => time_span(input.seconds, &self.tr, false),
Context::AnswerButtons => answer_button_time(input.seconds, &self.tr),
}
.into())
fn format_timespan(&self, input: FormatTimespanRequest) -> Result<generic::String> {
service::format_timespan(&self.tr, input)
}
fn i18n_resources(
&self,
input: anki_proto::i18n::I18nResourcesRequest,
) -> Result<generic::Json> {
serde_json::to_vec(&self.tr.resources_for_js(&input.modules))
.map(Into::into)
.map_err(Into::into)
}
}
fn build_fluent_args(
input: HashMap<String, anki_proto::i18n::TranslateArgValue>,
) -> FluentArgs<'static> {
let mut args = FluentArgs::new();
for (key, val) in input {
args.set(key, translate_arg_to_fluent_val(&val));
}
args
}
fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> {
use anki_proto::i18n::translate_arg_value::Value as V;
match &arg.value {
Some(val) => match val {
V::Str(s) => FluentValue::String(s.to_owned().into()),
V::Number(f) => FluentValue::Number(f.into()),
},
None => FluentValue::String("".into()),
fn i18n_resources(&self, input: I18nResourcesRequest) -> Result<generic::Json> {
service::i18n_resources(&self.tr, input)
}
}

View file

@ -1,67 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::image_occlusion::imageocclusion_service::Service as ImageOcclusionService;
use super::Backend;
use crate::prelude::*;
/// Backend implementation of the image-occlusion service. Every method
/// takes the collection mutex via with_col() and delegates to the
/// matching Collection method.
impl ImageOcclusionService for Backend {
type Error = AnkiError;
/// Load the image at `path` so the client can draw occlusions over it.
fn get_image_for_occlusion(
&self,
input: anki_proto::image_occlusion::GetImageForOcclusionRequest,
) -> Result<anki_proto::image_occlusion::GetImageForOcclusionResponse> {
self.with_col(|col| col.get_image_for_occlusion(&input.path))
}
/// Create a new image-occlusion note from the given image and shapes.
fn add_image_occlusion_note(
&self,
input: anki_proto::image_occlusion::AddImageOcclusionNoteRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.add_image_occlusion_note(
input.notetype_id.into(),
&input.image_path,
&input.occlusions,
&input.header,
&input.back_extra,
input.tags,
)
})
.map(Into::into)
}
/// Fetch an existing image-occlusion note for editing.
fn get_image_occlusion_note(
&self,
input: anki_proto::image_occlusion::GetImageOcclusionNoteRequest,
) -> Result<anki_proto::image_occlusion::GetImageOcclusionNoteResponse> {
self.with_col(|col| col.get_image_occlusion_note(input.note_id.into()))
}
/// Replace the occlusions/header/back-extra/tags of an existing note.
fn update_image_occlusion_note(
&self,
input: anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.update_image_occlusion_note(
input.note_id.into(),
&input.occlusions,
&input.header,
&input.back_extra,
input.tags,
)
})
.map(Into::into)
}
/// Add the built-in image-occlusion notetype to the collection.
fn add_image_occlusion_notetype(
&self,
_input: generic::Empty,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.add_image_occlusion_notetype())
.map(Into::into)
}
}

View file

@ -3,24 +3,16 @@
use std::path::Path;
use anki_proto::generic;
use anki_proto::import_export::export_limit;
pub(super) use anki_proto::import_export::importexport_service::Service as ImportExportService;
use anki_proto::import_export::ExportLimit;
use super::Backend;
use crate::import_export::package::import_colpkg;
use crate::import_export::NoteLog;
use crate::prelude::*;
use crate::search::SearchNode;
impl ImportExportService for Backend {
type Error = AnkiError;
use crate::services::BackendImportExportService;
impl BackendImportExportService for Backend {
fn export_collection_package(
&self,
input: anki_proto::import_export::ExportCollectionPackageRequest,
) -> Result<generic::Empty> {
) -> Result<()> {
self.abort_media_sync_and_wait();
let mut guard = self.lock_open_collection()?;
@ -34,7 +26,7 @@ impl ImportExportService for Backend {
fn import_collection_package(
&self,
input: anki_proto::import_export::ImportCollectionPackageRequest,
) -> Result<generic::Empty> {
) -> Result<()> {
let _guard = self.lock_closed_collection()?;
import_colpkg(
@ -46,115 +38,4 @@ impl ImportExportService for Backend {
)
.map(Into::into)
}
/// Import an .apkg file into the open collection, returning the changes
/// plus a log of imported/skipped notes.
fn import_anki_package(
&self,
input: anki_proto::import_export::ImportAnkiPackageRequest,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_apkg(&input.package_path))
.map(Into::into)
}
/// Export part of the collection to an .apkg file, returning the number
/// of exported notes. A missing limit means the whole collection.
fn export_anki_package(
&self,
input: anki_proto::import_export::ExportAnkiPackageRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| {
col.export_apkg(
&input.out_path,
SearchNode::from(input.limit.unwrap_or_default()),
input.with_scheduling,
input.with_media,
input.legacy,
None,
)
})
.map(Into::into)
}
/// Probe a CSV file and guess its import settings (delimiter, columns,
/// target notetype/deck, HTML handling).
fn get_csv_metadata(
&self,
input: anki_proto::import_export::CsvMetadataRequest,
) -> Result<anki_proto::import_export::CsvMetadata> {
// only pass a delimiter through if the client explicitly set one
let delimiter = input.delimiter.is_some().then(|| input.delimiter());
self.with_col(|col| {
col.get_csv_metadata(
&input.path,
delimiter,
input.notetype_id.map(Into::into),
input.deck_id.map(Into::into),
input.is_html,
)
})
}
/// Import a CSV file using previously-confirmed metadata.
fn import_csv(
&self,
input: anki_proto::import_export::ImportCsvRequest,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_csv(&input.path, input.metadata.unwrap_or_default()))
.map(Into::into)
}
/// Export notes to CSV, returning the number of rows written.
fn export_note_csv(
&self,
input: anki_proto::import_export::ExportNoteCsvRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| col.export_note_csv(input))
.map(Into::into)
}
/// Export cards to CSV, returning the number of rows written.
fn export_card_csv(
&self,
input: anki_proto::import_export::ExportCardCsvRequest,
) -> Result<generic::UInt32> {
self.with_col(|col| {
col.export_card_csv(
&input.out_path,
SearchNode::from(input.limit.unwrap_or_default()),
input.with_html,
)
})
.map(Into::into)
}
/// Import notes from a JSON file on disk (path in `input.val`).
fn import_json_file(
&self,
input: generic::String,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_json_file(&input.val))
.map(Into::into)
}
/// Import notes from an in-memory JSON string.
fn import_json_string(
&self,
input: generic::String,
) -> Result<anki_proto::import_export::ImportResponse> {
self.with_col(|col| col.import_json_string(&input.val))
.map(Into::into)
}
}
impl From<OpOutput<NoteLog>> for anki_proto::import_export::ImportResponse {
    /// Split an import operation's output into the change summary and
    /// the per-note import log expected by clients.
    fn from(value: OpOutput<NoteLog>) -> Self {
        Self {
            changes: Some(value.changes.into()),
            log: Some(value.output),
        }
    }
}
impl From<ExportLimit> for SearchNode {
    /// Map an export limit oneof onto a search node, defaulting to the
    /// whole collection when no limit was supplied.
    fn from(export_limit: ExportLimit) -> Self {
        use export_limit::Limit;
        match export_limit
            .limit
            .unwrap_or(Limit::WholeCollection(generic::Empty {}))
        {
            Limit::WholeCollection(_) => Self::WholeCollection,
            Limit::DeckId(did) => Self::from_deck_id(did, true),
            Limit::NoteIds(nids) => Self::from_note_ids(nids.note_ids),
            Limit::CardIds(cids) => Self::from_card_ids(cids.cids),
        }
    }
}

View file

@ -1,20 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::links::help_page_link_request::HelpPage;
pub(super) use anki_proto::links::links_service::Service as LinksService;
use super::Backend;
use crate::links::help_page_to_link;
use crate::prelude::*;
/// Backend implementation of the links service. Pure computation, so no
/// collection access is required.
impl LinksService for Backend {
type Error = AnkiError;
/// Map a help-page enum value to its manual URL; unrecognised values
/// fall back to the manual's index page.
fn help_page_link(
&self,
input: anki_proto::links::HelpPageLinkRequest,
) -> Result<anki_proto::generic::String> {
Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into())
}
}

View file

@ -1,64 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::media::media_service::Service as MediaService;
use super::notes::to_i64s;
use super::Backend;
use crate::prelude::*;
/// Backend implementation of the media service; all methods delegate to
/// the collection's media manager/checker under the collection mutex.
impl MediaService for Backend {
type Error = AnkiError;
/// Run a full media check inside a no-undo transaction, returning the
/// unused/missing file lists, a rendered report, and whether the media
/// trash folder is non-empty.
fn check_media(&self, _input: generic::Empty) -> Result<anki_proto::media::CheckMediaResponse> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let mut checker = col.media_checker()?;
let mut output = checker.check()?;
let mut report = checker.summarize_output(&mut output);
// append any templates that reference media fields to the report
col.report_media_field_referencing_templates(&mut report)?;
Ok(anki_proto::media::CheckMediaResponse {
unused: output.unused,
missing: output.missing,
missing_media_notes: to_i64s(output.missing_media_notes),
report,
have_trash: output.trash_count > 0,
})
})
})
}
/// Move the named files from the media folder into the media trash.
fn trash_media_files(
&self,
input: anki_proto::media::TrashMediaFilesRequest,
) -> Result<generic::Empty> {
self.with_col(|col| col.media()?.remove_files(&input.fnames))
.map(Into::into)
}
/// Store a file in the media folder; returns the actual name used,
/// which may differ from the desired name (e.g. on collision).
fn add_media_file(
&self,
input: anki_proto::media::AddMediaFileRequest,
) -> Result<generic::String> {
self.with_col(|col| {
Ok(col
.media()?
.add_file(&input.desired_name, &input.data)?
.to_string()
.into())
})
}
/// Permanently delete everything in the media trash.
fn empty_trash(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| col.media_checker()?.empty_trash())
.map(Into::into)
}
/// Move files from the media trash back into the media folder.
fn restore_trash(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| col.media_checker()?.restore_trash())
.map(Into::into)
}
}

View file

@ -1,65 +1,30 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
// infallible backend methods still return a result
#![allow(clippy::unnecessary_wraps)]
mod adding;
mod ankidroid;
mod card;
mod cardrendering;
mod card_rendering;
mod collection;
mod config;
mod dbproxy;
mod deckconfig;
mod decks;
pub(crate) mod dbproxy;
mod error;
mod i18n;
mod image_occlusion;
mod import_export;
mod links;
mod media;
mod notes;
mod notetypes;
mod ops;
mod scheduler;
mod search;
mod stats;
mod sync;
mod tags;
use std::result;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread::JoinHandle;
use anki_proto::ServiceIndex;
use once_cell::sync::OnceCell;
use prost::Message;
use tokio::runtime;
use tokio::runtime::Runtime;
use self::ankidroid::AnkidroidService;
use self::card::CardsService;
use self::cardrendering::CardRenderingService;
use self::collection::CollectionService;
use self::config::ConfigService;
use self::deckconfig::DeckConfigService;
use self::decks::DecksService;
use self::i18n::I18nService;
use self::image_occlusion::ImageOcclusionService;
use self::import_export::ImportExportService;
use self::links::LinksService;
use self::media::MediaService;
use self::notes::NotesService;
use self::notetypes::NotetypesService;
use self::scheduler::SchedulerService;
use self::search::SearchService;
use self::stats::StatsService;
use self::sync::SyncService;
use self::sync::SyncState;
use self::tags::TagsService;
use crate::backend::dbproxy::db_command_bytes;
use crate::backend::sync::SyncState;
use crate::prelude::*;
use crate::progress::AbortHandleSlot;
use crate::progress::Progress;
@ -68,7 +33,7 @@ use crate::progress::ThrottlingProgressHandler;
pub struct Backend {
col: Arc<Mutex<Option<Collection>>>,
tr: I18n,
pub(crate) tr: I18n,
server: bool,
sync_abort: AbortHandleSlot,
progress_state: Arc<Mutex<ProgressState>>,
@ -115,47 +80,6 @@ impl Backend {
&self.tr
}
pub fn run_method(
&self,
service: u32,
method: u32,
input: &[u8],
) -> result::Result<Vec<u8>, Vec<u8>> {
ServiceIndex::try_from(service)
.or_invalid("invalid service")
.and_then(|service| match service {
ServiceIndex::Ankidroid => AnkidroidService::run_method(self, method, input),
ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input),
ServiceIndex::Decks => DecksService::run_method(self, method, input),
ServiceIndex::Notes => NotesService::run_method(self, method, input),
ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input),
ServiceIndex::Config => ConfigService::run_method(self, method, input),
ServiceIndex::Sync => SyncService::run_method(self, method, input),
ServiceIndex::Tags => TagsService::run_method(self, method, input),
ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input),
ServiceIndex::CardRendering => {
CardRenderingService::run_method(self, method, input)
}
ServiceIndex::Media => MediaService::run_method(self, method, input),
ServiceIndex::Stats => StatsService::run_method(self, method, input),
ServiceIndex::Search => SearchService::run_method(self, method, input),
ServiceIndex::I18n => I18nService::run_method(self, method, input),
ServiceIndex::Links => LinksService::run_method(self, method, input),
ServiceIndex::Collection => CollectionService::run_method(self, method, input),
ServiceIndex::Cards => CardsService::run_method(self, method, input),
ServiceIndex::ImportExport => ImportExportService::run_method(self, method, input),
ServiceIndex::ImageOcclusion => {
ImageOcclusionService::run_method(self, method, input)
}
})
.map_err(|err| {
let backend_err = err.into_protobuf(&self.tr);
let mut bytes = Vec::new();
backend_err.encode(&mut bytes).unwrap();
bytes
})
}
pub fn run_db_command_bytes(&self, input: &[u8]) -> result::Result<Vec<u8>, Vec<u8>> {
self.db_command(input).map_err(|err| {
let backend_err = err.into_protobuf(&self.tr);
@ -168,7 +92,7 @@ impl Backend {
/// If collection is open, run the provided closure while holding
/// the mutex.
/// If collection is not open, return an error.
fn with_col<F, T>(&self, func: F) -> Result<T>
pub(crate) fn with_col<F, T>(&self, func: F) -> Result<T>
where
F: FnOnce(&mut Collection) -> Result<T>,
{

View file

@ -1,201 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashSet;
pub(super) use anki_proto::notes::notes_service::Service as NotesService;
use super::Backend;
use crate::cloze::add_cloze_numbers_in_string;
use crate::prelude::*;
/// Backend implementation of the notes service; methods take the
/// collection mutex via with_col() and delegate to Collection.
impl NotesService for Backend {
type Error = AnkiError;
/// Create an unsaved, empty note for the given notetype.
fn new_note(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::notes::Note> {
let ntid = input.into();
self.with_col(|col| {
let nt = col.get_notetype(ntid)?.or_not_found(ntid)?;
Ok(nt.new_note().into())
})
}
/// Add a note to the given deck, returning its new id and the changes.
fn add_note(
&self,
input: anki_proto::notes::AddNoteRequest,
) -> Result<anki_proto::notes::AddNoteResponse> {
self.with_col(|col| {
let mut note: Note = input.note.or_invalid("no note provided")?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(anki_proto::notes::AddNoteResponse {
note_id: note.id.0,
changes: Some(changes.into()),
})
})
}
/// Suggest a deck and notetype for the Add screen, biased by the home
/// deck of the card currently under review.
fn defaults_for_adding(
&self,
input: anki_proto::notes::DefaultsForAddingRequest,
) -> Result<anki_proto::notes::DeckAndNotetype> {
self.with_col(|col| {
let home_deck: DeckId = input.home_deck_of_current_review_card.into();
col.defaults_for_adding(home_deck).map(Into::into)
})
}
/// Return the default deck for a notetype; 0 signals "no default".
fn default_deck_for_notetype(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::decks::DeckId> {
self.with_col(|col| {
Ok(col
.default_deck_for_notetype(input.into())?
.unwrap_or(DeckId(0))
.into())
})
}
/// Save modified notes; undo entry creation is skippable by the caller.
fn update_notes(
&self,
input: anki_proto::notes::UpdateNotesRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let notes = input
.notes
.into_iter()
.map(Into::into)
.collect::<Vec<Note>>();
col.update_notes_maybe_undoable(notes, !input.skip_undo_entry)
})
.map(Into::into)
}
/// Fetch a single note by id, erroring if it does not exist.
fn get_note(&self, input: anki_proto::notes::NoteId) -> Result<anki_proto::notes::Note> {
let nid = input.into();
self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into))
}
/// Remove notes by note id, or — when no note ids were given — the
/// notes owning the provided card ids.
fn remove_notes(
&self,
input: anki_proto::notes::RemoveNotesRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
if !input.note_ids.is_empty() {
col.remove_notes(
&input
.note_ids
.into_iter()
.map(Into::into)
.collect::<Vec<_>>(),
)
} else {
let nids = col.storage.note_ids_of_cards(
&input
.card_ids
.into_iter()
.map(Into::into)
.collect::<Vec<_>>(),
)?;
col.remove_notes(&nids.into_iter().collect::<Vec<_>>())
}
.map(Into::into)
})
}
/// Collect the cloze deletion numbers used across all fields of the
/// given (not necessarily saved) note. No collection access needed.
fn cloze_numbers_in_note(
&self,
note: anki_proto::notes::Note,
) -> Result<anki_proto::notes::ClozeNumbersInNoteResponse> {
let mut set = HashSet::with_capacity(4);
for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set);
}
Ok(anki_proto::notes::ClozeNumbersInNoteResponse {
numbers: set.into_iter().map(|n| n as u32).collect(),
})
}
/// Regenerate cards and/or bump modification time after bulk edits.
fn after_note_updates(
&self,
input: anki_proto::notes::AfterNoteUpdatesRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.after_note_updates(
&to_note_ids(input.nids),
input.generate_cards,
input.mark_notes_modified,
)
.map(Into::into)
})
}
/// List the union of field names over the notetypes of the given notes.
fn field_names_for_notes(
&self,
input: anki_proto::notes::FieldNamesForNotesRequest,
) -> Result<anki_proto::notes::FieldNamesForNotesResponse> {
self.with_col(|col| {
let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
col.storage
.field_names_for_notes(&nids)
.map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields })
})
}
/// Check a note's fields for problems (e.g. duplicates/empty first
/// field); the state enum is returned as its raw i32 value.
fn note_fields_check(
&self,
input: anki_proto::notes::Note,
) -> Result<anki_proto::notes::NoteFieldsCheckResponse> {
let note: Note = input.into();
self.with_col(|col| {
col.note_fields_check(&note)
.map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 })
})
}
/// List a note's cards in template order.
fn cards_of_note(
&self,
input: anki_proto::notes::NoteId,
) -> Result<anki_proto::cards::CardIds> {
self.with_col(|col| {
col.storage
.all_card_ids_of_note_in_template_order(NoteId(input.nid))
.map(|v| anki_proto::cards::CardIds {
cids: v.into_iter().map(Into::into).collect(),
})
})
}
/// Return the shared notetype of the given notes, erroring if they
/// span more than one notetype.
fn get_single_notetype_of_notes(
&self,
input: anki_proto::notes::NoteIds,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId))
.map(Into::into)
})
}
}
pub(super) fn to_note_ids(ids: Vec<i64>) -> Vec<NoteId> {
ids.into_iter().map(NoteId).collect()
}
/// Strip the NoteId newtype back down to raw i64 ids.
pub(super) fn to_i64s(ids: Vec<NoteId>) -> Vec<i64> {
    ids.into_iter().map(|nid| nid.0).collect()
}
impl From<anki_proto::notes::NoteId> for NoteId {
fn from(nid: anki_proto::notes::NoteId) -> Self {
NoteId(nid.nid)
}
}
impl From<NoteId> for anki_proto::notes::NoteId {
    /// Wrap the native id in its protobuf message form.
    fn from(nid: NoteId) -> Self {
        Self { nid: nid.0 }
    }
}

View file

@ -1,284 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod answering;
mod states;
use anki_proto::generic;
use anki_proto::generic::Empty;
use anki_proto::scheduler;
pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService;
use anki_proto::scheduler::SchedulingStatesWithContext;
use anki_proto::scheduler::SetSchedulingStatesRequest;
use super::Backend;
use crate::prelude::*;
use crate::scheduler::new::NewCardDueOrder;
use crate::scheduler::states::CardState;
use crate::scheduler::states::SchedulingStates;
use crate::stats::studied_today;
/// Backend implementation of the scheduler service; most methods take
/// the collection mutex via with_col() and delegate to Collection.
impl SchedulerService for Backend {
type Error = AnkiError;
/// This behaves like _updateCutoff() in older code - it also unburies at
/// the start of a new day.
fn sched_timing_today(
&self,
_input: generic::Empty,
) -> Result<scheduler::SchedTimingTodayResponse> {
self.with_col(|col| {
let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?;
Ok(timing.into())
})
}
/// Fetch data from DB and return rendered string.
fn studied_today(&self, _input: generic::Empty) -> Result<generic::String> {
self.with_col(|col| col.studied_today().map(Into::into))
}
/// Message rendering only, for old graphs.
fn studied_today_message(
&self,
input: scheduler::StudiedTodayMessageRequest,
) -> Result<generic::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
}
/// Write updated deck statistics for today, outside the undo system.
fn update_stats(&self, input: scheduler::UpdateStatsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
let usn = col.usn()?;
col.update_deck_stats(today, usn, input).map(Into::into)
})
})
}
/// Raise today's new/review limits for a deck, outside the undo system.
fn extend_limits(&self, input: scheduler::ExtendLimitsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
let usn = col.usn()?;
col.extend_limits(
today,
usn,
input.deck_id.into(),
input.new_delta,
input.review_delta,
)
.map(Into::into)
})
})
}
/// Today's new/learn/review counts for a single deck.
fn counts_for_deck_today(
&self,
input: anki_proto::decks::DeckId,
) -> Result<scheduler::CountsForDeckTodayResponse> {
self.with_col(|col| col.counts_for_deck_today(input.did.into()))
}
/// Info for the congratulations screen shown when a deck is finished.
fn congrats_info(&self, _input: generic::Empty) -> Result<scheduler::CongratsInfoResponse> {
self.with_col(|col| col.congrats_info())
}
/// Unbury or unsuspend the given cards.
fn restore_buried_and_suspended_cards(
&self,
input: anki_proto::cards::CardIds,
) -> Result<anki_proto::collection::OpChanges> {
let cids: Vec<_> = input.cids.into_iter().map(CardId).collect();
self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into))
}
/// Unbury cards in a deck, filtered by the requested bury mode.
fn unbury_deck(
&self,
input: scheduler::UnburyDeckRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.unbury_deck(input.deck_id.into(), input.mode())
.map(Into::into)
})
}
/// Bury or suspend cards; when no card ids are given, operates on all
/// cards of the provided notes instead.
fn bury_or_suspend_cards(
&self,
input: scheduler::BuryOrSuspendCardsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
let mode = input.mode();
let cids = if input.card_ids.is_empty() {
col.storage
.card_ids_of_notes(&input.note_ids.into_newtype(NoteId))?
} else {
input.card_ids.into_newtype(CardId)
};
col.bury_or_suspend_cards(&cids, mode).map(Into::into)
})
}
/// Move all cards out of a filtered deck, back to their home decks.
fn empty_filtered_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into))
}
/// Empty and refill a filtered deck from its search terms.
fn rebuild_filtered_deck(
&self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
}
/// Reset the given cards to the new queue, with optional position
/// restore and count reset.
fn schedule_cards_as_new(
&self,
input: scheduler::ScheduleCardsAsNewRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let cids = input.card_ids.into_newtype(CardId);
col.reschedule_cards_as_new(
&cids,
input.log,
input.restore_position,
input.reset_counts,
input
.context
.and_then(scheduler::schedule_cards_as_new_request::Context::from_i32),
)
.map(Into::into)
})
}
/// Default checkbox states for the forget/reset dialog in a context.
fn schedule_cards_as_new_defaults(
&self,
input: scheduler::ScheduleCardsAsNewDefaultsRequest,
) -> Result<scheduler::ScheduleCardsAsNewDefaultsResponse> {
self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context())))
}
/// Set cards due on specific day(s), optionally saving the day range
/// under the provided config key.
fn set_due_date(
&self,
input: scheduler::SetDueDateRequest,
) -> Result<anki_proto::collection::OpChanges> {
let config = input.config_key.map(|v| v.key().into());
let days = input.days;
let cids = input.card_ids.into_newtype(CardId);
self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into))
}
/// Reposition new cards, either randomly or preserving current order.
fn sort_cards(
&self,
input: scheduler::SortCardsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let cids = input.card_ids.into_newtype(CardId);
let (start, step, random, shift) = (
input.starting_from,
input.step_size,
input.randomize,
input.shift_existing,
);
let order = if random {
NewCardDueOrder::Random
} else {
NewCardDueOrder::Preserve
};
self.with_col(|col| {
col.sort_cards(&cids, start, step, order, shift)
.map(Into::into)
})
}
/// Default values for the reposition dialog.
fn reposition_defaults(
&self,
_input: generic::Empty,
) -> Result<scheduler::RepositionDefaultsResponse> {
self.with_col(|col| Ok(col.reposition_defaults()))
}
/// Legacy deck sort: randomize or order a deck's new cards.
fn sort_deck(
&self,
input: scheduler::SortDeckRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.sort_deck_legacy(input.deck_id.into(), input.randomize)
.map(Into::into)
})
}
/// The four possible next states for a card being answered.
fn get_scheduling_states(
&self,
input: anki_proto::cards::CardId,
) -> Result<scheduler::SchedulingStates> {
let cid: CardId = input.into();
self.with_col(|col| col.get_scheduling_states(cid))
.map(Into::into)
}
/// Human-readable descriptions of the answer-button outcomes.
fn describe_next_states(
&self,
input: scheduler::SchedulingStates,
) -> Result<generic::StringList> {
let states: SchedulingStates = input.into();
self.with_col(|col| col.describe_next_states(states))
.map(Into::into)
}
/// Whether answering into this state would mark the card a leech.
/// Pure computation; no collection access needed.
fn state_is_leech(&self, input: scheduler::SchedulingState) -> Result<generic::Bool> {
let state: CardState = input.into();
Ok(state.leeched().into())
}
/// Record an answer to a card.
fn answer_card(
&self,
input: scheduler::CardAnswer,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.answer_card(&mut input.into()))
.map(Into::into)
}
/// Migrate the collection to the v2 scheduler, outside the undo system.
fn upgrade_scheduler(&self, _input: generic::Empty) -> Result<generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler()))
.map(Into::into)
}
/// Fetch the next batch of cards due for study.
fn get_queued_cards(
&self,
input: scheduler::GetQueuedCardsRequest,
) -> Result<scheduler::QueuedCards> {
self.with_col(|col| {
col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
.map(Into::into)
})
}
/// Create or update a custom study session for a deck.
fn custom_study(
&self,
input: scheduler::CustomStudyRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.custom_study(input)).map(Into::into)
}
/// Default values for the custom study dialog.
fn custom_study_defaults(
&self,
input: scheduler::CustomStudyDefaultsRequest,
) -> Result<scheduler::CustomStudyDefaultsResponse> {
self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
}
// The two methods below are implemented by the frontend (for custom
// scheduling); reaching them here is a client error.
fn get_scheduling_states_with_context(
&self,
_input: Empty,
) -> std::result::Result<SchedulingStatesWithContext, Self::Error> {
invalid_input!("the frontend should implement this")
}
fn set_scheduling_states(
&self,
_input: SetSchedulingStatesRequest,
) -> std::result::Result<Empty, Self::Error> {
invalid_input!("the frontend should implement this")
}
}

View file

@ -3,8 +3,6 @@
use std::sync::Arc;
use anki_proto::generic;
pub(super) use anki_proto::sync::sync_service::Service as SyncService;
use anki_proto::sync::sync_status_response::Required;
use anki_proto::sync::SyncStatusResponse;
use futures::future::AbortHandle;
@ -99,27 +97,25 @@ impl TryFrom<anki_proto::sync::SyncAuth> for SyncAuth {
}
}
impl SyncService for Backend {
type Error = AnkiError;
fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
impl crate::services::BackendSyncService for Backend {
fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result<()> {
self.sync_media_inner(input).map(Into::into)
}
fn abort_sync(&self, _input: generic::Empty) -> Result<generic::Empty> {
fn abort_sync(&self) -> Result<()> {
if let Some(handle) = self.sync_abort.lock().unwrap().take() {
handle.abort();
}
Ok(().into())
Ok(())
}
/// Abort the media sync. Does not wait for completion.
fn abort_media_sync(&self, _input: generic::Empty) -> Result<generic::Empty> {
fn abort_media_sync(&self) -> Result<()> {
let guard = self.state.lock().unwrap();
if let Some(handle) = &guard.sync.media_sync_abort {
handle.abort();
}
Ok(().into())
Ok(())
}
fn sync_login(
@ -143,14 +139,14 @@ impl SyncService for Backend {
self.sync_collection_inner(input)
}
fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result<()> {
self.full_sync_inner(input, true)?;
Ok(().into())
Ok(())
}
fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result<generic::Empty> {
fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result<()> {
self.full_sync_inner(input, false)?;
Ok(().into())
Ok(())
}
}

View file

@ -1,127 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::tags::tags_service::Service as TagsService;
use super::notes::to_note_ids;
use super::Backend;
use crate::prelude::*;
impl TagsService for Backend {
type Error = AnkiError;
fn clear_unused_tags(
&self,
_input: generic::Empty,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.clear_unused_tags().map(Into::into))
}
fn all_tags(&self, _input: generic::Empty) -> Result<generic::StringList> {
Ok(generic::StringList {
vals: self.with_col(|col| {
Ok(col
.storage
.all_tags()?
.into_iter()
.map(|t| t.name)
.collect())
})?,
})
}
fn remove_tags(
&self,
tags: generic::String,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into))
}
fn set_tag_collapsed(
&self,
input: anki_proto::tags::SetTagCollapsedRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
col.set_tag_collapsed(&input.name, input.collapsed)
.map(Into::into)
})
}
fn tag_tree(&self, _input: generic::Empty) -> Result<anki_proto::tags::TagTreeNode> {
self.with_col(|col| col.tag_tree())
}
fn reparent_tags(
&self,
input: anki_proto::tags::ReparentTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let source_tags = input.tags;
let target_tag = if input.new_parent.is_empty() {
None
} else {
Some(input.new_parent)
};
self.with_col(|col| col.reparent_tags(&source_tags, target_tag))
.map(Into::into)
}
fn rename_tags(
&self,
input: anki_proto::tags::RenameTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix))
.map(Into::into)
}
fn add_note_tags(
&self,
input: anki_proto::tags::NoteIdsAndTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
})
}
fn remove_note_tags(
&self,
input: anki_proto::tags::NoteIdsAndTagsRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
})
}
/// Search-and-replace within note tags, optionally with a regex and
/// case sensitivity.
fn find_and_replace_tag(
    &self,
    input: anki_proto::tags::FindAndReplaceTagRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
    self.with_col(|col| {
        // An empty id list means "apply to every note in the collection";
        // the empty search string matches all notes.
        let note_ids = if input.note_ids.is_empty() {
            col.search_notes_unordered("")?
        } else {
            to_note_ids(input.note_ids)
        };
        col.find_and_replace_tag(
            &note_ids,
            &input.search,
            &input.replacement,
            input.regex,
            input.match_case,
        )
        .map(Into::into)
    })
}
/// Return up to `match_limit` tag names completing the typed input.
fn complete_tag(
    &self,
    input: anki_proto::tags::CompleteTagRequest,
) -> Result<anki_proto::tags::CompleteTagResponse> {
    let limit = input.match_limit as usize;
    let tags = self.with_col(|col| col.complete_tag(&input.input, limit))?;
    Ok(anki_proto::tags::CompleteTagResponse { tags })
}
}

View file

@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod service;
pub(crate) mod undo;
use std::collections::hash_map::Entry;

View file

@ -1,84 +1,83 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::cards::cards_service::Service as CardsService;
use anki_proto::generic;
use super::Backend;
use crate::card::Card;
use crate::card::CardId;
use crate::card::CardQueue;
use crate::card::CardType;
use crate::prelude::*;
use crate::collection::Collection;
use crate::decks::DeckId;
use crate::error;
use crate::error::AnkiError;
use crate::error::OrInvalid;
use crate::error::OrNotFound;
use crate::notes::NoteId;
use crate::prelude::TimestampSecs;
use crate::prelude::Usn;
impl CardsService for Backend {
type Error = AnkiError;
fn get_card(&self, input: anki_proto::cards::CardId) -> Result<anki_proto::cards::Card> {
impl crate::services::CardsService for Collection {
fn get_card(
&mut self,
input: anki_proto::cards::CardId,
) -> error::Result<anki_proto::cards::Card> {
let cid = input.into();
self.with_col(|col| {
col.storage
.get_card(cid)
.and_then(|opt| opt.or_not_found(cid))
.map(Into::into)
})
self.storage
.get_card(cid)
.and_then(|opt| opt.or_not_found(cid))
.map(Into::into)
}
fn update_cards(
&self,
&mut self,
input: anki_proto::cards::UpdateCardsRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| {
let cards = input
.cards
.into_iter()
.map(TryInto::try_into)
.collect::<Result<Vec<Card>, AnkiError>>()?;
for card in &cards {
card.validate_custom_data()?;
}
col.update_cards_maybe_undoable(cards, !input.skip_undo_entry)
})
.map(Into::into)
) -> error::Result<anki_proto::collection::OpChanges> {
let cards = input
.cards
.into_iter()
.map(TryInto::try_into)
.collect::<error::Result<Vec<Card>, AnkiError>>()?;
for card in &cards {
card.validate_custom_data()?;
}
self.update_cards_maybe_undoable(cards, !input.skip_undo_entry)
.map(Into::into)
}
fn remove_cards(&self, input: anki_proto::cards::RemoveCardsRequest) -> Result<generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes(
&input
.card_ids
.into_iter()
.map(Into::into)
.collect::<Vec<_>>(),
)?;
Ok(().into())
})
fn remove_cards(&mut self, input: anki_proto::cards::RemoveCardsRequest) -> error::Result<()> {
self.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes(
&input
.card_ids
.into_iter()
.map(Into::into)
.collect::<Vec<_>>(),
)?;
Ok(())
})
}
fn set_deck(
&self,
&mut self,
input: anki_proto::cards::SetDeckRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
) -> error::Result<anki_proto::collection::OpChangesWithCount> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect();
let deck_id = input.deck_id.into();
self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
self.set_deck(&cids, deck_id).map(Into::into)
}
fn set_flag(
&self,
&mut self,
input: anki_proto::cards::SetFlagRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
self.with_col(|col| {
col.set_card_flag(&to_card_ids(input.card_ids), input.flag)
.map(Into::into)
})
) -> error::Result<anki_proto::collection::OpChangesWithCount> {
self.set_card_flag(&to_card_ids(input.card_ids), input.flag)
.map(Into::into)
}
}
impl TryFrom<anki_proto::cards::Card> for Card {
type Error = AnkiError;
fn try_from(c: anki_proto::cards::Card) -> Result<Self, Self::Error> {
fn try_from(c: anki_proto::cards::Card) -> error::Result<Self, Self::Error> {
let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?;
let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?;
Ok(Card {

View file

@ -6,6 +6,7 @@ use std::collections::HashMap;
use crate::prelude::*;
mod parser;
pub(crate) mod service;
pub mod tts;
mod writer;

View file

@ -1,22 +1,23 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::card_rendering::cardrendering_service::Service as CardRenderingService;
use anki_proto::card_rendering::ExtractClozeForTypingRequest;
use anki_proto::generic;
use super::Backend;
use crate::card::CardId;
use crate::card_rendering::extract_av_tags;
use crate::card_rendering::strip_av_tags;
use crate::card_rendering::tts;
use crate::cloze::extract_cloze_for_typing;
use crate::collection::Collection;
use crate::error::OrInvalid;
use crate::error::Result;
use crate::latex::extract_latex;
use crate::latex::extract_latex_expanding_clozes;
use crate::latex::ExtractedLatex;
use crate::markdown::render_markdown;
use crate::notetype::CardTemplateSchema11;
use crate::notetype::RenderCardOutput;
use crate::prelude::*;
use crate::template::RenderedNode;
use crate::text::decode_iri_paths;
use crate::text::encode_iri_paths;
@ -25,14 +26,15 @@ use crate::text::strip_html;
use crate::text::strip_html_preserving_media_filenames;
use crate::typeanswer::compare_answer;
impl CardRenderingService for Backend {
type Error = AnkiError;
/// While the majority of these methods do not actually require a collection,
/// they are unlikely to be executed without one, so we only bother implementing
/// them for the collection.
impl crate::services::CardRenderingService for Collection {
fn extract_av_tags(
&self,
&mut self,
input: anki_proto::card_rendering::ExtractAvTagsRequest,
) -> Result<anki_proto::card_rendering::ExtractAvTagsResponse> {
let out = extract_av_tags(input.text, input.question_side, self.i18n());
let out = extract_av_tags(input.text, input.question_side, &self.tr);
Ok(anki_proto::card_rendering::ExtractAvTagsResponse {
text: out.0,
av_tags: out.1,
@ -40,7 +42,7 @@ impl CardRenderingService for Backend {
}
fn extract_latex(
&self,
&mut self,
input: anki_proto::card_rendering::ExtractLatexRequest,
) -> Result<anki_proto::card_rendering::ExtractLatexResponse> {
let func = if input.expand_clozes {
@ -64,57 +66,49 @@ impl CardRenderingService for Backend {
})
}
fn get_empty_cards(
&self,
_input: generic::Empty,
) -> Result<anki_proto::card_rendering::EmptyCardsReport> {
self.with_col(|col| {
let mut empty = col.empty_cards()?;
let report = col.empty_cards_report(&mut empty)?;
fn get_empty_cards(&mut self) -> Result<anki_proto::card_rendering::EmptyCardsReport> {
let mut empty = self.empty_cards()?;
let report = self.empty_cards_report(&mut empty)?;
let mut outnotes = vec![];
for (_ntid, notes) in empty {
outnotes.extend(notes.into_iter().map(|e| {
anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards {
note_id: e.nid.0,
will_delete_note: e.empty.len() == e.current_count,
card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(),
}
}))
}
Ok(anki_proto::card_rendering::EmptyCardsReport {
report,
notes: outnotes,
})
let mut outnotes = vec![];
for (_ntid, notes) in empty {
outnotes.extend(notes.into_iter().map(|e| {
anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards {
note_id: e.nid.0,
will_delete_note: e.empty.len() == e.current_count,
card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(),
}
}))
}
Ok(anki_proto::card_rendering::EmptyCardsReport {
report,
notes: outnotes,
})
}
fn render_existing_card(
&self,
&mut self,
input: anki_proto::card_rendering::RenderExistingCardRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
self.with_col(|col| {
col.render_existing_card(CardId(input.card_id), input.browser)
.map(Into::into)
})
self.render_existing_card(CardId(input.card_id), input.browser)
.map(Into::into)
}
fn render_uncommitted_card(
&self,
&mut self,
input: anki_proto::card_rendering::RenderUncommittedCardRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
let template = input.template.or_invalid("missing template")?.into();
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
let fill_empty = input.fill_empty;
self.with_col(|col| {
col.render_uncommitted_card(&mut note, &template, ord, fill_empty)
.map(Into::into)
})
self.render_uncommitted_card(&mut note, &template, ord, fill_empty)
.map(Into::into)
}
fn render_uncommitted_card_legacy(
&self,
&mut self,
input: anki_proto::card_rendering::RenderUncommittedCardLegacyRequest,
) -> Result<anki_proto::card_rendering::RenderCardResponse> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
@ -122,18 +116,17 @@ impl CardRenderingService for Backend {
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
let fill_empty = input.fill_empty;
self.with_col(|col| {
col.render_uncommitted_card(&mut note, &template, ord, fill_empty)
.map(Into::into)
})
self.render_uncommitted_card(&mut note, &template, ord, fill_empty)
.map(Into::into)
}
fn strip_av_tags(&self, input: generic::String) -> Result<generic::String> {
fn strip_av_tags(&mut self, input: generic::String) -> Result<generic::String> {
Ok(strip_av_tags(input.val).into())
}
fn render_markdown(
&self,
&mut self,
input: anki_proto::card_rendering::RenderMarkdownRequest,
) -> Result<generic::String> {
let mut text = render_markdown(&input.markdown);
@ -144,37 +137,30 @@ impl CardRenderingService for Backend {
Ok(text.into())
}
fn encode_iri_paths(&self, input: generic::String) -> Result<generic::String> {
fn encode_iri_paths(&mut self, input: generic::String) -> Result<generic::String> {
Ok(encode_iri_paths(&input.val).to_string().into())
}
fn decode_iri_paths(&self, input: generic::String) -> Result<generic::String> {
fn decode_iri_paths(&mut self, input: generic::String) -> Result<generic::String> {
Ok(decode_iri_paths(&input.val).to_string().into())
}
fn strip_html(
&self,
&mut self,
input: anki_proto::card_rendering::StripHtmlRequest,
) -> Result<generic::String> {
Ok(match input.mode() {
anki_proto::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text),
anki_proto::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => {
strip_html_preserving_media_filenames(&input.text)
}
}
.to_string()
.into())
strip_html_proto(input)
}
fn compare_answer(
&self,
&mut self,
input: anki_proto::card_rendering::CompareAnswerRequest,
) -> Result<generic::String> {
Ok(compare_answer(&input.expected, &input.provided).into())
}
fn extract_cloze_for_typing(
&self,
&mut self,
input: ExtractClozeForTypingRequest,
) -> Result<generic::String> {
Ok(extract_cloze_for_typing(&input.text, input.ordinal as u16)
@ -183,7 +169,7 @@ impl CardRenderingService for Backend {
}
fn all_tts_voices(
&self,
&mut self,
input: anki_proto::card_rendering::AllTtsVoicesRequest,
) -> Result<anki_proto::card_rendering::AllTtsVoicesResponse> {
tts::all_voices(input.validate)
@ -191,9 +177,9 @@ impl CardRenderingService for Backend {
}
fn write_tts_stream(
&self,
&mut self,
request: anki_proto::card_rendering::WriteTtsStreamRequest,
) -> Result<generic::Empty> {
) -> Result<()> {
tts::write_stream(
&request.path,
&request.voice_id,
@ -246,3 +232,16 @@ impl From<RenderCardOutput> for anki_proto::card_rendering::RenderCardResponse {
}
}
}
/// Strip HTML from the request text, either fully or while preserving
/// media filenames, per the requested mode.
pub(crate) fn strip_html_proto(
    input: anki_proto::card_rendering::StripHtmlRequest,
) -> Result<generic::String> {
    use anki_proto::card_rendering::strip_html_request::Mode;
    let stripped = match input.mode() {
        Mode::Normal => strip_html(&input.text),
        Mode::PreserveMediaFilenames => strip_html_preserving_media_filenames(&input.text),
    };
    Ok(stripped.to_string().into())
}

View file

@ -2,6 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod schema11;
mod service;
pub(crate) mod undo;
mod update;

View file

@ -1,92 +1,85 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::deckconfig::deckconfig_service::Service as DeckConfigService;
use anki_proto::generic;
use super::Backend;
use crate::collection::Collection;
use crate::deckconfig::DeckConfSchema11;
use crate::deckconfig::DeckConfig;
use crate::deckconfig::DeckConfigId;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::prelude::*;
impl DeckConfigService for Backend {
type Error = AnkiError;
use crate::error;
impl crate::services::DeckConfigService for Collection {
fn add_or_update_deck_config_legacy(
&self,
&mut self,
input: generic::Json,
) -> Result<anki_proto::deckconfig::DeckConfigId> {
) -> error::Result<anki_proto::deckconfig::DeckConfigId> {
let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?;
let mut conf: DeckConfig = conf.into();
self.with_col(|col| {
col.transact_no_undo(|col| {
col.add_or_update_deck_config_legacy(&mut conf)?;
Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 })
})
self.transact_no_undo(|col| {
col.add_or_update_deck_config_legacy(&mut conf)?;
Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 })
})
.map(Into::into)
}
fn all_deck_config_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
self.with_col(|col| {
let conf: Vec<DeckConfSchema11> = col
.storage
.all_deck_config()?
.into_iter()
.map(Into::into)
.collect();
serde_json::to_vec(&conf).map_err(Into::into)
})
.map(Into::into)
fn all_deck_config_legacy(&mut self) -> error::Result<generic::Json> {
let conf: Vec<DeckConfSchema11> = self
.storage
.all_deck_config()?
.into_iter()
.map(Into::into)
.collect();
serde_json::to_vec(&conf)
.map_err(Into::into)
.map(Into::into)
}
fn get_deck_config(
&self,
&mut self,
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<anki_proto::deckconfig::DeckConfig> {
self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into()))
) -> error::Result<anki_proto::deckconfig::DeckConfig> {
Ok(Collection::get_deck_config(self, input.into(), true)?
.unwrap()
.into())
}
fn get_deck_config_legacy(
&self,
&mut self,
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<generic::Json> {
self.with_col(|col| {
let conf = col.get_deck_config(input.into(), true)?.unwrap();
let conf: DeckConfSchema11 = conf.into();
Ok(serde_json::to_vec(&conf)?)
})
.map(Into::into)
) -> error::Result<generic::Json> {
let conf = Collection::get_deck_config(self, input.into(), true)?.unwrap();
let conf: DeckConfSchema11 = conf.into();
Ok(serde_json::to_vec(&conf)?).map(Into::into)
}
fn new_deck_config_legacy(&self, _input: generic::Empty) -> Result<generic::Json> {
fn new_deck_config_legacy(&mut self) -> error::Result<generic::Json> {
serde_json::to_vec(&DeckConfSchema11::default())
.map_err(Into::into)
.map(Into::into)
}
fn remove_deck_config(
&self,
&mut self,
input: anki_proto::deckconfig::DeckConfigId,
) -> Result<generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into())))
) -> error::Result<()> {
self.transact_no_undo(|col| col.remove_deck_config_inner(input.into()))
.map(Into::into)
}
fn get_deck_configs_for_update(
&self,
&mut self,
input: anki_proto::decks::DeckId,
) -> Result<anki_proto::deckconfig::DeckConfigsForUpdate> {
self.with_col(|col| col.get_deck_configs_for_update(input.did.into()))
) -> error::Result<anki_proto::deckconfig::DeckConfigsForUpdate> {
self.get_deck_configs_for_update(input.did.into())
}
fn update_deck_configs(
&self,
&mut self,
input: anki_proto::deckconfig::UpdateDeckConfigsRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.update_deck_configs(input.into()))
.map(Into::into)
) -> error::Result<anki_proto::collection::OpChanges> {
self.update_deck_configs(input.into()).map(Into::into)
}
}

View file

@ -10,6 +10,7 @@ mod name;
mod remove;
mod reparent;
mod schema11;
mod service;
mod stats;
pub mod tree;
pub(crate) mod undo;

317
rslib/src/decks/service.rs Normal file
View file

@ -0,0 +1,317 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::decks::deck::kind_container::Kind as DeckKind;
use anki_proto::generic;
use crate::collection::Collection;
use crate::decks::filtered::search_order_labels;
use crate::decks::Deck;
use crate::decks::DeckId;
use crate::decks::DeckSchema11;
use crate::decks::NativeDeckName;
use crate::error;
use crate::error::AnkiError;
use crate::error::OrInvalid;
use crate::error::OrNotFound;
use crate::prelude::TimestampSecs;
use crate::prelude::Usn;
use crate::scheduler::filtered::FilteredDeckForUpdate;
/// Protobuf service methods for decks, implemented directly on the
/// collection. Most methods are thin adapters that convert proto
/// messages to/from native types and delegate to same-named inherent
/// Collection methods (inherent methods take precedence over these
/// trait methods in Rust's method resolution, so the `self.foo(...)`
/// calls below do not recurse).
impl crate::services::DecksService for Collection {
    /// Return a fresh, unsaved normal deck.
    fn new_deck(&mut self) -> error::Result<anki_proto::decks::Deck> {
        Ok(Deck::new_normal().into())
    }
    /// Add a deck supplied as a proto message; returns changes plus the
    /// assigned deck id.
    fn add_deck(
        &mut self,
        deck: anki_proto::decks::Deck,
    ) -> error::Result<anki_proto::collection::OpChangesWithId> {
        let mut deck: Deck = deck.try_into()?;
        Ok(self.add_deck(&mut deck)?.map(|_| deck.id.0).into())
    }
    /// Add a deck supplied as schema11 JSON.
    fn add_deck_legacy(
        &mut self,
        input: generic::Json,
    ) -> error::Result<anki_proto::collection::OpChangesWithId> {
        let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?;
        let mut deck: Deck = schema11.into();
        let output = self.add_deck(&mut deck)?;
        Ok(output.map(|_| deck.id.0).into())
    }
    fn add_or_update_deck_legacy(
        &mut self,
        input: anki_proto::decks::AddOrUpdateDeckLegacyRequest,
    ) -> error::Result<anki_proto::decks::DeckId> {
        let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
        let mut deck: Deck = schema11.into();
        if input.preserve_usn_and_mtime {
            // Keep the incoming usn/mtime (e.g. when syncing), bypassing
            // the usual undoable update path.
            self.transact_no_undo(|col| {
                let usn = col.usn()?;
                col.add_or_update_single_deck_with_existing_id(&mut deck, usn)
            })?;
        } else {
            self.add_or_update_deck(&mut deck)?;
        }
        Ok(anki_proto::decks::DeckId { did: deck.id.0 })
    }
    fn deck_tree(
        &mut self,
        input: anki_proto::decks::DeckTreeRequest,
    ) -> error::Result<anki_proto::decks::DeckTreeNode> {
        // A timestamp of 0 means "no due counts requested".
        let now = if input.now == 0 {
            None
        } else {
            Some(TimestampSecs(input.now))
        };
        self.deck_tree(now)
    }
    /// Legacy JSON form of the deck tree.
    fn deck_tree_legacy(&mut self) -> error::Result<generic::Json> {
        let tree = self.legacy_deck_tree()?;
        serde_json::to_vec(&tree)
            .map_err(Into::into)
            .map(Into::into)
    }
    /// All decks in schema11 JSON form.
    fn get_all_decks_legacy(&mut self) -> error::Result<generic::Json> {
        let decks = self.storage.get_all_decks_as_schema11()?;
        serde_json::to_vec(&decks)
            .map_err(Into::into)
            .map(Into::into)
    }
    fn get_deck_id_by_name(
        &mut self,
        input: generic::String,
    ) -> error::Result<anki_proto::decks::DeckId> {
        // Missing deck becomes a NotFound error carrying the name.
        self.get_deck_id(&input.val).and_then(|d| {
            d.or_not_found(input.val)
                .map(|d| anki_proto::decks::DeckId { did: d.0 })
        })
    }
    fn get_deck(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> error::Result<anki_proto::decks::Deck> {
        let did = input.into();
        Ok(self.storage.get_deck(did)?.or_not_found(did)?.into())
    }
    fn update_deck(
        &mut self,
        input: anki_proto::decks::Deck,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        let mut deck = Deck::try_from(input)?;
        self.update_deck(&mut deck).map(Into::into)
    }
    /// Update a deck supplied as schema11 JSON.
    fn update_deck_legacy(
        &mut self,
        input: generic::Json,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        let deck: DeckSchema11 = serde_json::from_slice(&input.json)?;
        let mut deck = deck.into();
        self.update_deck(&mut deck).map(Into::into)
    }
    /// Fetch a single deck in schema11 JSON form.
    fn get_deck_legacy(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> error::Result<generic::Json> {
        let did = input.into();
        let deck: DeckSchema11 = self.storage.get_deck(did)?.or_not_found(did)?.into();
        serde_json::to_vec(&deck)
            .map_err(Into::into)
            .map(Into::into)
    }
    fn get_deck_names(
        &mut self,
        input: anki_proto::decks::GetDeckNamesRequest,
    ) -> error::Result<anki_proto::decks::DeckNames> {
        let names = if input.include_filtered {
            self.get_all_deck_names(input.skip_empty_default)?
        } else {
            // Filtered decks excluded; empty default deck always included.
            self.get_all_normal_deck_names()?
        };
        Ok(deck_names_to_proto(names))
    }
    fn get_deck_and_child_names(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> error::Result<anki_proto::decks::DeckNames> {
        // Fully-qualified call to disambiguate from this trait method.
        Collection::get_deck_and_child_names(self, input.did.into()).map(deck_names_to_proto)
    }
    /// New deck in schema11 JSON form; `input.val` selects filtered vs normal.
    fn new_deck_legacy(&mut self, input: generic::Bool) -> error::Result<generic::Json> {
        let deck = if input.val {
            Deck::new_filtered()
        } else {
            Deck::new_normal()
        };
        let schema11: DeckSchema11 = deck.into();
        serde_json::to_vec(&schema11)
            .map_err(Into::into)
            .map(Into::into)
    }
    /// Remove the given decks and all of their child decks.
    fn remove_decks(
        &mut self,
        input: anki_proto::decks::DeckIds,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        self.remove_decks_and_child_decks(&input.dids.into_iter().map(DeckId).collect::<Vec<_>>())
            .map(Into::into)
    }
    fn reparent_decks(
        &mut self,
        input: anki_proto::decks::ReparentDecksRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
        // A parent id of 0 means "move to the top level".
        let new_parent = if input.new_parent == 0 {
            None
        } else {
            Some(input.new_parent.into())
        };
        self.reparent_decks(&deck_ids, new_parent).map(Into::into)
    }
    fn rename_deck(
        &mut self,
        input: anki_proto::decks::RenameDeckRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.rename_deck(input.deck_id.into(), &input.new_name)
            .map(Into::into)
    }
    fn get_or_create_filtered_deck(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> error::Result<anki_proto::decks::FilteredDeckForUpdate> {
        self.get_or_create_filtered_deck(input.into())
            .map(Into::into)
    }
    fn add_or_update_filtered_deck(
        &mut self,
        input: anki_proto::decks::FilteredDeckForUpdate,
    ) -> error::Result<anki_proto::collection::OpChangesWithId> {
        self.add_or_update_filtered_deck(input.into())
            .map(|out| out.map(i64::from))
            .map(Into::into)
    }
    /// Localised labels for the filtered-deck search order options.
    fn filtered_deck_order_labels(&mut self) -> error::Result<generic::StringList> {
        Ok(search_order_labels(&self.tr).into())
    }
    fn set_deck_collapsed(
        &mut self,
        input: anki_proto::decks::SetDeckCollapsedRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
            .map(Into::into)
    }
    fn set_current_deck(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.set_current_deck(input.did.into()).map(Into::into)
    }
    fn get_current_deck(&mut self) -> error::Result<anki_proto::decks::Deck> {
        self.get_current_deck().map(|deck| (*deck).clone().into())
    }
}
impl From<anki_proto::decks::DeckId> for DeckId {
fn from(did: anki_proto::decks::DeckId) -> Self {
DeckId(did.did)
}
}
/// Wrap the native newtype into its proto message form.
impl From<DeckId> for anki_proto::decks::DeckId {
    fn from(did: DeckId) -> Self {
        Self { did: did.0 }
    }
}
/// Convert the native filtered-deck update struct into its proto form.
impl From<FilteredDeckForUpdate> for anki_proto::decks::FilteredDeckForUpdate {
    fn from(deck: FilteredDeckForUpdate) -> Self {
        Self {
            id: deck.id.into(),
            name: deck.human_name,
            config: Some(deck.config),
        }
    }
}
/// Convert the proto filtered-deck update message into the native struct.
/// A missing config falls back to the default.
impl From<anki_proto::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
    fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self {
        Self {
            id: deck.id.into(),
            human_name: deck.name,
            config: deck.config.unwrap_or_default(),
        }
    }
}
/// Convert a native deck into its proto message form, using the
/// human-readable deck name.
impl From<Deck> for anki_proto::decks::Deck {
    fn from(d: Deck) -> Self {
        Self {
            id: d.id.0,
            name: d.name.human_name(),
            mtime_secs: d.mtime_secs.0,
            usn: d.usn.0,
            common: Some(d.common),
            kind: Some(kind_from_inline(d.kind)),
        }
    }
}
/// Convert a proto deck into the native form; fails if the kind is unset.
impl TryFrom<anki_proto::decks::Deck> for Deck {
    type Error = AnkiError;

    fn try_from(d: anki_proto::decks::Deck) -> error::Result<Self, Self::Error> {
        let kind = d.kind.or_invalid("missing kind")?;
        Ok(Deck {
            id: DeckId(d.id),
            name: NativeDeckName::from_human_name(&d.name),
            mtime_secs: TimestampSecs(d.mtime_secs),
            usn: Usn(d.usn),
            common: d.common.unwrap_or_default(),
            kind: kind_to_inline(kind),
        })
    }
}
/// Map the boxed proto deck kind onto the inline kind-container variant.
fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind {
    use anki_proto::decks::deck::Kind as ProtoKind;
    match kind {
        ProtoKind::Normal(normal) => DeckKind::Normal(normal),
        ProtoKind::Filtered(filtered) => DeckKind::Filtered(filtered),
    }
}
/// Map the inline kind-container variant back onto the proto deck kind.
fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind {
    use anki_proto::decks::deck::Kind as ProtoKind;
    match k {
        DeckKind::Normal(n) => ProtoKind::Normal(n),
        DeckKind::Filtered(f) => ProtoKind::Filtered(f),
    }
}
fn deck_name_to_proto((id, name): (DeckId, String)) -> anki_proto::decks::DeckNameId {
anki_proto::decks::DeckNameId { id: id.0, name }
}
/// Wrap a list of native id/name pairs into the proto DeckNames message.
fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames {
    let entries = names.into_iter().map(deck_name_to_proto).collect();
    anki_proto::decks::DeckNames { entries }
}

View file

@ -13,7 +13,6 @@ pub mod windows;
use anki_i18n::I18n;
use anki_io::FileIoError;
use anki_io::FileOp;
use anki_proto::ProtoError;
pub use db::DbError;
pub use db::DbErrorKind;
pub use filtered::CustomStudyError;
@ -112,6 +111,8 @@ pub enum AnkiError {
WindowsError {
source: windows::WindowsError,
},
InvalidMethodIndex,
InvalidServiceIndex,
}
// error helpers
@ -157,6 +158,8 @@ impl AnkiError {
| AnkiError::CollectionNotOpen
| AnkiError::CollectionAlreadyOpen
| AnkiError::Existing
| AnkiError::InvalidServiceIndex
| AnkiError::InvalidMethodIndex
| AnkiError::UndoEmpty => format!("{:?}", self),
AnkiError::FileIoError { source } => source.message(),
AnkiError::InvalidInput { source } => source.message(),
@ -299,11 +302,3 @@ pub enum CardTypeErrorDetails {
MissingCloze,
ExtraneousCloze,
}
impl From<anki_proto::ProtoError> for AnkiError {
fn from(value: ProtoError) -> Self {
AnkiError::ProtoError {
info: value.to_string(),
}
}
}

3
rslib/src/i18n/mod.rs Normal file
View file

@ -0,0 +1,3 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(crate) mod service;

91
rslib/src/i18n/service.rs Normal file
View file

@ -0,0 +1,91 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use anki_i18n::I18n;
use anki_proto::generic;
use anki_proto::generic::Json;
use anki_proto::i18n::format_timespan_request::Context;
use anki_proto::i18n::FormatTimespanRequest;
use anki_proto::i18n::I18nResourcesRequest;
use anki_proto::i18n::TranslateStringRequest;
use fluent_bundle::FluentArgs;
use fluent_bundle::FluentValue;
use crate::collection::Collection;
use crate::error;
use crate::scheduler::timespan::answer_button_time;
use crate::scheduler::timespan::time_span;
/// Collection-based implementation of the i18n service. Each method
/// delegates to the free function of the same name below, passing the
/// collection's translation catalog.
impl crate::services::I18nService for Collection {
    fn translate_string(
        &mut self,
        input: TranslateStringRequest,
    ) -> error::Result<generic::String> {
        translate_string(&self.tr, input)
    }
    fn format_timespan(&mut self, input: FormatTimespanRequest) -> error::Result<generic::String> {
        format_timespan(&self.tr, input)
    }
    fn i18n_resources(&mut self, input: I18nResourcesRequest) -> error::Result<Json> {
        i18n_resources(&self.tr, input)
    }
}
/// Look up a translation by module/message index and interpolate the
/// supplied arguments.
pub(crate) fn translate_string(
    tr: &I18n,
    input: TranslateStringRequest,
) -> error::Result<generic::String> {
    let args = build_fluent_args(input.args);
    let module = input.module_index as usize;
    let message = input.message_index as usize;
    let text = tr.translate_via_index(module, message, args);
    Ok(text.into())
}
/// Render a number of seconds as localised text, with the formatting
/// style selected by the request context.
pub(crate) fn format_timespan(
    tr: &I18n,
    input: FormatTimespanRequest,
) -> error::Result<generic::String> {
    let seconds = input.seconds;
    let text = match input.context() {
        Context::Precise => time_span(seconds, tr, true),
        Context::Intervals => time_span(seconds, tr, false),
        Context::AnswerButtons => answer_button_time(seconds, tr),
    };
    Ok(text.into())
}
/// Serialise the translation resources for the requested modules as
/// JSON for consumption by the frontend.
pub(crate) fn i18n_resources(
    tr: &I18n,
    input: I18nResourcesRequest,
) -> error::Result<generic::Json> {
    let resources = tr.resources_for_js(&input.modules);
    let bytes = serde_json::to_vec(&resources)?;
    Ok(bytes.into())
}
/// Convert the proto argument map into owned Fluent arguments.
fn build_fluent_args(
    input: HashMap<String, anki_proto::i18n::TranslateArgValue>,
) -> FluentArgs<'static> {
    input
        .into_iter()
        .fold(FluentArgs::new(), |mut args, (key, val)| {
            args.set(key, translate_arg_to_fluent_val(&val));
            args
        })
}
/// Convert a single proto argument into an owned Fluent value; an unset
/// value becomes the empty string.
fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> {
    use anki_proto::i18n::translate_arg_value::Value as V;
    match arg.value.as_ref() {
        Some(V::Str(s)) => FluentValue::String(s.to_owned().into()),
        Some(V::Number(f)) => FluentValue::Number(f.into()),
        None => FluentValue::String("".into()),
    }
}

View file

@ -4,3 +4,4 @@
pub mod imagedata;
pub mod imageocclusion;
pub(crate) mod notetype;
mod service;

View file

@ -0,0 +1,60 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::image_occlusion::AddImageOcclusionNoteRequest;
use anki_proto::image_occlusion::GetImageForOcclusionRequest;
use anki_proto::image_occlusion::GetImageForOcclusionResponse;
use anki_proto::image_occlusion::GetImageOcclusionNoteRequest;
use anki_proto::image_occlusion::GetImageOcclusionNoteResponse;
use anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest;
use crate::collection::Collection;
use crate::error;
/// Protobuf service methods for image occlusion, implemented on the
/// collection. Each method unpacks the proto request and delegates to
/// the same-named inherent Collection method (inherent methods take
/// precedence over these trait methods, so the calls do not recurse).
impl crate::services::ImageOcclusionService for Collection {
    fn get_image_for_occlusion(
        &mut self,
        input: GetImageForOcclusionRequest,
    ) -> error::Result<GetImageForOcclusionResponse> {
        self.get_image_for_occlusion(&input.path)
    }
    fn add_image_occlusion_note(
        &mut self,
        input: AddImageOcclusionNoteRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.add_image_occlusion_note(
            input.notetype_id.into(),
            &input.image_path,
            &input.occlusions,
            &input.header,
            &input.back_extra,
            input.tags,
        )
        .map(Into::into)
    }
    fn get_image_occlusion_note(
        &mut self,
        input: GetImageOcclusionNoteRequest,
    ) -> error::Result<GetImageOcclusionNoteResponse> {
        self.get_image_occlusion_note(input.note_id.into())
    }
    fn update_image_occlusion_note(
        &mut self,
        input: UpdateImageOcclusionNoteRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.update_image_occlusion_note(
            input.note_id.into(),
            &input.occlusions,
            &input.header,
            &input.back_extra,
            input.tags,
        )
        .map(Into::into)
    }
    /// Add the built-in image occlusion notetype to the collection.
    fn add_image_occlusion_notetype(&mut self) -> error::Result<anki_proto::collection::OpChanges> {
        self.add_image_occlusion_notetype().map(Into::into)
    }
}

View file

@ -4,6 +4,7 @@
mod gather;
mod insert;
pub mod package;
mod service;
pub mod text;
pub use anki_proto::import_export::import_response::Log as NoteLog;

View file

@ -0,0 +1,114 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
use anki_proto::import_export::import_response::Log as NoteLog;
use anki_proto::import_export::ExportLimit;
use crate::collection::Collection;
use crate::error;
use crate::ops::OpOutput;
use crate::search::SearchNode;
/// Protobuf service methods for import/export, implemented on the
/// collection. Each method unpacks the proto request and delegates to
/// the same-named inherent Collection method (inherent methods take
/// precedence over these trait methods, so the calls do not recurse).
impl crate::services::ImportExportService for Collection {
    fn import_anki_package(
        &mut self,
        input: anki_proto::import_export::ImportAnkiPackageRequest,
    ) -> error::Result<anki_proto::import_export::ImportResponse> {
        self.import_apkg(&input.package_path).map(Into::into)
    }
    /// Export to an .apkg file; returns the number of exported items.
    fn export_anki_package(
        &mut self,
        input: anki_proto::import_export::ExportAnkiPackageRequest,
    ) -> error::Result<generic::UInt32> {
        self.export_apkg(
            &input.out_path,
            // An unset limit defaults to exporting the whole collection.
            SearchNode::from(input.limit.unwrap_or_default()),
            input.with_scheduling,
            input.with_media,
            input.legacy,
            None,
        )
        .map(Into::into)
    }
    fn get_csv_metadata(
        &mut self,
        input: anki_proto::import_export::CsvMetadataRequest,
    ) -> error::Result<anki_proto::import_export::CsvMetadata> {
        // Only pass a delimiter through when one was explicitly set.
        let delimiter = input.delimiter.is_some().then(|| input.delimiter());
        self.get_csv_metadata(
            &input.path,
            delimiter,
            input.notetype_id.map(Into::into),
            input.deck_id.map(Into::into),
            input.is_html,
        )
    }
    fn import_csv(
        &mut self,
        input: anki_proto::import_export::ImportCsvRequest,
    ) -> error::Result<anki_proto::import_export::ImportResponse> {
        self.import_csv(&input.path, input.metadata.unwrap_or_default())
            .map(Into::into)
    }
    fn export_note_csv(
        &mut self,
        input: anki_proto::import_export::ExportNoteCsvRequest,
    ) -> error::Result<generic::UInt32> {
        self.export_note_csv(input).map(Into::into)
    }
    fn export_card_csv(
        &mut self,
        input: anki_proto::import_export::ExportCardCsvRequest,
    ) -> error::Result<generic::UInt32> {
        self.export_card_csv(
            &input.out_path,
            SearchNode::from(input.limit.unwrap_or_default()),
            input.with_html,
        )
        .map(Into::into)
    }
    fn import_json_file(
        &mut self,
        input: generic::String,
    ) -> error::Result<anki_proto::import_export::ImportResponse> {
        self.import_json_file(&input.val).map(Into::into)
    }
    fn import_json_string(
        &mut self,
        input: generic::String,
    ) -> error::Result<anki_proto::import_export::ImportResponse> {
        self.import_json_string(&input.val).map(Into::into)
    }
}
/// Convert an undoable-operation result carrying a note log into the
/// protobuf import response: the op's change set plus the per-note log.
impl From<OpOutput<NoteLog>> for anki_proto::import_export::ImportResponse {
    fn from(output: OpOutput<NoteLog>) -> Self {
        Self {
            changes: Some(output.changes.into()),
            log: Some(output.output),
        }
    }
}
/// Map a protobuf export limit onto the equivalent search node.
impl From<ExportLimit> for SearchNode {
    fn from(export_limit: ExportLimit) -> Self {
        use anki_proto::import_export::export_limit::Limit;
        // An unset limit is treated exactly like an explicit
        // whole-collection limit.
        match export_limit.limit {
            None | Some(Limit::WholeCollection(_)) => Self::WholeCollection,
            Some(Limit::DeckId(did)) => Self::from_deck_id(did, true),
            Some(Limit::NoteIds(nids)) => Self::from_note_ids(nids.note_ids),
            Some(Limit::CardIds(cids)) => Self::from_card_ids(cids.cids),
        }
    }
}

View file

@ -4,6 +4,7 @@
#![deny(unused_must_use)]
pub mod adding;
pub(crate) mod ankidroid;
pub mod backend;
pub mod browser_table;
pub mod card;
@ -16,6 +17,7 @@ pub mod deckconfig;
pub mod decks;
pub mod error;
pub mod findreplace;
pub mod i18n;
pub mod image_occlusion;
pub mod import_export;
pub mod latex;
@ -33,6 +35,7 @@ pub mod revlog;
pub mod scheduler;
pub mod search;
pub mod serde;
pub mod services;
mod stats;
pub mod storage;
pub mod sync;

View file

@ -3,6 +3,9 @@
pub use anki_proto::links::help_page_link_request::HelpPage;
use crate::collection::Collection;
use crate::error;
static HELP_SITE: &str = "https://docs.ankiweb.net/";
pub fn help_page_to_link(page: HelpPage) -> String {
@ -40,3 +43,12 @@ pub fn help_page_link_suffix(page: HelpPage) -> &'static str {
}
}
}
/// Collection-level implementation of the generated links service.
impl crate::services::LinksService for Collection {
    /// Resolve a help-page enum value to its documentation URL.
    /// Unknown enum values fall back to `HelpPage::Index`.
    fn help_page_link(
        &mut self,
        input: anki_proto::links::HelpPageLinkRequest,
    ) -> error::Result<anki_proto::generic::String> {
        Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into())
    }
}

View file

@ -3,6 +3,7 @@
pub mod check;
pub mod files;
mod service;
use std::borrow::Cow;
use std::collections::HashMap;

View file

@ -0,0 +1,50 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
use anki_proto::media::AddMediaFileRequest;
use anki_proto::media::CheckMediaResponse;
use anki_proto::media::TrashMediaFilesRequest;
use crate::collection::Collection;
use crate::error;
use crate::notes::service::to_i64s;
/// Collection-level implementation of the generated media service.
impl crate::services::MediaService for Collection {
    /// Run a full media check inside a non-undoable transaction and
    /// return the unused/missing files plus a rendered report.
    fn check_media(&mut self) -> error::Result<CheckMediaResponse> {
        self.transact_no_undo(|col| {
            let mut checker = col.media_checker()?;
            let mut output = checker.check()?;
            let mut report = checker.summarize_output(&mut output);
            // Append template-level media references to the report.
            col.report_media_field_referencing_templates(&mut report)?;
            Ok(CheckMediaResponse {
                unused: output.unused,
                missing: output.missing,
                missing_media_notes: to_i64s(output.missing_media_notes),
                report,
                have_trash: output.trash_count > 0,
            })
        })
    }

    /// Add a file to the media folder and return the name actually used
    /// (which may differ from `desired_name`, e.g. on collision —
    /// confirm against `add_file`).
    fn add_media_file(&mut self, input: AddMediaFileRequest) -> error::Result<generic::String> {
        Ok(self
            .media()?
            .add_file(&input.desired_name, &input.data)?
            .to_string()
            .into())
    }

    /// Move the named files into the media trash.
    fn trash_media_files(&mut self, input: TrashMediaFilesRequest) -> error::Result<()> {
        self.media()?.remove_files(&input.fnames)
    }

    /// Permanently delete everything in the media trash.
    fn empty_trash(&mut self) -> error::Result<()> {
        self.media_checker()?.empty_trash()
    }

    /// Restore all trashed media files to the media folder.
    fn restore_trash(&mut self) -> error::Result<()> {
        self.media_checker()?.restore_trash()
    }
}

View file

@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(crate) mod service;
pub(crate) mod undo;
use std::borrow::Cow;

188
rslib/src/notes/service.rs Normal file
View file

@ -0,0 +1,188 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashSet;
use crate::cloze::add_cloze_numbers_in_string;
use crate::collection::Collection;
use crate::decks::DeckId;
use crate::error;
use crate::error::OrInvalid;
use crate::error::OrNotFound;
use crate::notes::Note;
use crate::notes::NoteId;
use crate::prelude::IntoNewtypeVec;
/// Unwrap `NoteId` newtypes into the raw i64 values used by protobuf
/// messages.
pub(crate) fn to_i64s(ids: Vec<NoteId>) -> Vec<i64> {
    let mut out = Vec::with_capacity(ids.len());
    for id in ids {
        out.push(id.into());
    }
    out
}
/// Collection-level implementation of the generated notes service.
///
/// Methods unpack the protobuf request, delegate to the inherent
/// `Collection`/storage method, and convert back via `Into`. Inherent
/// methods shadow same-named trait methods in Rust's method resolution,
/// so e.g. `self.add_note(...)` below is not recursive.
impl crate::services::NotesService for Collection {
    /// Create an unsaved note with empty fields for the given notetype.
    fn new_note(
        &mut self,
        input: anki_proto::notetypes::NotetypeId,
    ) -> error::Result<anki_proto::notes::Note> {
        let ntid = input.into();
        let nt = self.get_notetype(ntid)?.or_not_found(ntid)?;
        Ok(nt.new_note().into())
    }

    /// Add a note to the given deck, returning its new id and the
    /// resulting change set.
    fn add_note(
        &mut self,
        input: anki_proto::notes::AddNoteRequest,
    ) -> error::Result<anki_proto::notes::AddNoteResponse> {
        let mut note: Note = input.note.or_invalid("no note provided")?.into();
        let changes = self.add_note(&mut note, DeckId(input.deck_id))?;
        Ok(anki_proto::notes::AddNoteResponse {
            note_id: note.id.0,
            changes: Some(changes.into()),
        })
    }

    /// Suggest the deck and notetype for the Add screen, given the deck
    /// of the card currently under review.
    fn defaults_for_adding(
        &mut self,
        input: anki_proto::notes::DefaultsForAddingRequest,
    ) -> error::Result<anki_proto::notes::DeckAndNotetype> {
        let home_deck: DeckId = input.home_deck_of_current_review_card.into();
        self.defaults_for_adding(home_deck).map(Into::into)
    }

    /// The default deck for a notetype; DeckId(0) when none is recorded.
    fn default_deck_for_notetype(
        &mut self,
        input: anki_proto::notetypes::NotetypeId,
    ) -> error::Result<anki_proto::decks::DeckId> {
        Ok(self
            .default_deck_for_notetype(input.into())?
            .unwrap_or(DeckId(0))
            .into())
    }

    /// Update one or more notes, optionally skipping the undo entry.
    fn update_notes(
        &mut self,
        input: anki_proto::notes::UpdateNotesRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        let notes = input
            .notes
            .into_iter()
            .map(Into::into)
            .collect::<Vec<Note>>();
        self.update_notes_maybe_undoable(notes, !input.skip_undo_entry)
            .map(Into::into)
    }

    /// Fetch a single note by id, erroring if it does not exist.
    fn get_note(
        &mut self,
        input: anki_proto::notes::NoteId,
    ) -> error::Result<anki_proto::notes::Note> {
        let nid = input.into();
        self.storage
            .get_note(nid)?
            .or_not_found(nid)
            .map(Into::into)
    }

    /// Remove notes by note id; if no note ids were supplied, remove the
    /// notes owning the supplied card ids instead.
    fn remove_notes(
        &mut self,
        input: anki_proto::notes::RemoveNotesRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        if !input.note_ids.is_empty() {
            self.remove_notes(
                &input
                    .note_ids
                    .into_iter()
                    .map(Into::into)
                    .collect::<Vec<_>>(),
            )
        } else {
            let nids = self.storage.note_ids_of_cards(
                &input
                    .card_ids
                    .into_iter()
                    .map(Into::into)
                    .collect::<Vec<_>>(),
            )?;
            self.remove_notes(&nids.into_iter().collect::<Vec<_>>())
        }
        .map(Into::into)
    }

    /// Gather the distinct cloze numbers referenced across all fields of
    /// the provided (not necessarily saved) note.
    fn cloze_numbers_in_note(
        &mut self,
        note: anki_proto::notes::Note,
    ) -> error::Result<anki_proto::notes::ClozeNumbersInNoteResponse> {
        // Small initial capacity: notes rarely use many cloze numbers.
        let mut set = HashSet::with_capacity(4);
        for field in &note.fields {
            add_cloze_numbers_in_string(field, &mut set);
        }
        Ok(anki_proto::notes::ClozeNumbersInNoteResponse {
            numbers: set.into_iter().map(|n| n as u32).collect(),
        })
    }

    /// Post-edit bookkeeping for the given notes (card generation,
    /// modification stamps).
    fn after_note_updates(
        &mut self,
        input: anki_proto::notes::AfterNoteUpdatesRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        self.after_note_updates(
            &to_note_ids(input.nids),
            input.generate_cards,
            input.mark_notes_modified,
        )
        .map(Into::into)
    }

    /// Field names present across the given notes.
    fn field_names_for_notes(
        &mut self,
        input: anki_proto::notes::FieldNamesForNotesRequest,
    ) -> error::Result<anki_proto::notes::FieldNamesForNotesResponse> {
        let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
        self.storage
            .field_names_for_notes(&nids)
            .map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields })
    }

    /// Validate a note's fields, returning the check state as the raw
    /// enum discriminant.
    fn note_fields_check(
        &mut self,
        input: anki_proto::notes::Note,
    ) -> error::Result<anki_proto::notes::NoteFieldsCheckResponse> {
        let note: Note = input.into();
        self.note_fields_check(&note)
            .map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 })
    }

    /// All card ids belonging to a note, in template order.
    fn cards_of_note(
        &mut self,
        input: anki_proto::notes::NoteId,
    ) -> error::Result<anki_proto::cards::CardIds> {
        self.storage
            .all_card_ids_of_note_in_template_order(NoteId(input.nid))
            .map(|v| anki_proto::cards::CardIds {
                cids: v.into_iter().map(Into::into).collect(),
            })
    }

    /// The single notetype shared by all given notes (errors if they
    /// span multiple notetypes — confirm against the inherent method).
    fn get_single_notetype_of_notes(
        &mut self,
        input: anki_proto::notes::NoteIds,
    ) -> error::Result<anki_proto::notetypes::NotetypeId> {
        self.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId))
            .map(Into::into)
    }
}
pub(crate) fn to_note_ids(ids: Vec<i64>) -> Vec<NoteId> {
ids.into_iter().map(NoteId).collect()
}
/// Protobuf note id -> domain newtype.
impl From<anki_proto::notes::NoteId> for NoteId {
    fn from(nid: anki_proto::notes::NoteId) -> Self {
        NoteId(nid.nid)
    }
}
/// Domain newtype -> protobuf note id.
impl From<NoteId> for anki_proto::notes::NoteId {
    fn from(nid: NoteId) -> Self {
        anki_proto::notes::NoteId { nid: nid.0 }
    }
}

View file

@ -10,6 +10,7 @@ mod render;
mod restore;
mod schema11;
mod schemachange;
mod service;
pub(crate) mod stock;
mod templates;
pub(crate) mod undo;

View file

@ -1,93 +1,88 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
pub(super) use anki_proto::notetypes::notetypes_service::Service as NotetypesService;
use anki_proto::notetypes::stock_notetype::Kind as StockKind;
use super::Backend;
use crate::collection::Collection;
use crate::config::get_aux_notetype_config_key;
use crate::error;
use crate::error::OrInvalid;
use crate::error::OrNotFound;
use crate::notes::NoteId;
use crate::notetype::stock::get_stock_notetype;
use crate::notetype::stock::StockKind;
use crate::notetype::ChangeNotetypeInput;
use crate::notetype::Notetype;
use crate::notetype::NotetypeChangeInfo;
use crate::notetype::NotetypeId;
use crate::notetype::NotetypeSchema11;
use crate::prelude::*;
impl NotetypesService for Backend {
type Error = AnkiError;
use crate::prelude::IntoNewtypeVec;
impl crate::services::NotetypesService for Collection {
fn add_notetype(
&self,
&mut self,
input: anki_proto::notetypes::Notetype,
) -> Result<anki_proto::collection::OpChangesWithId> {
) -> error::Result<anki_proto::collection::OpChangesWithId> {
let mut notetype: Notetype = input.into();
self.with_col(|col| {
Ok(col
.add_notetype(&mut notetype, false)?
.map(|_| notetype.id.0)
.into())
})
Ok(self
.add_notetype(&mut notetype, false)?
.map(|_| notetype.id.0)
.into())
}
fn update_notetype(
&self,
&mut self,
input: anki_proto::notetypes::Notetype,
) -> Result<anki_proto::collection::OpChanges> {
) -> error::Result<anki_proto::collection::OpChanges> {
let mut notetype: Notetype = input.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
.map(Into::into)
self.update_notetype(&mut notetype, false).map(Into::into)
}
fn add_notetype_legacy(
&self,
&mut self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChangesWithId> {
) -> error::Result<anki_proto::collection::OpChangesWithId> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| {
Ok(col
.add_notetype(&mut notetype, false)?
.map(|_| notetype.id.0)
.into())
})
Ok(self
.add_notetype(&mut notetype, false)?
.map(|_| notetype.id.0)
.into())
}
fn update_notetype_legacy(
&self,
&mut self,
input: generic::Json,
) -> Result<anki_proto::collection::OpChanges> {
) -> error::Result<anki_proto::collection::OpChanges> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
.map(Into::into)
self.update_notetype(&mut notetype, false).map(Into::into)
}
fn add_or_update_notetype(
&self,
&mut self,
input: anki_proto::notetypes::AddOrUpdateNotetypeRequest,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into();
if !input.preserve_usn_and_mtime {
nt.set_modified(col.usn()?);
}
if nt.id.0 == 0 {
col.add_notetype(&mut nt, input.skip_checks)?;
} else if !input.preserve_usn_and_mtime {
col.update_notetype(&mut nt, input.skip_checks)?;
} else {
col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?;
}
Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 })
})
) -> error::Result<anki_proto::notetypes::NotetypeId> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into();
if !input.preserve_usn_and_mtime {
nt.set_modified(self.usn()?);
}
if nt.id.0 == 0 {
self.add_notetype(&mut nt, input.skip_checks)?;
} else if !input.preserve_usn_and_mtime {
self.update_notetype(&mut nt, input.skip_checks)?;
} else {
self.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?;
}
Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 })
}
fn get_stock_notetype_legacy(
&self,
&mut self,
input: anki_proto::notetypes::StockNotetype,
) -> Result<generic::Json> {
) -> error::Result<generic::Json> {
let nt = get_stock_notetype(input.kind(), &self.tr);
let schema11: NotetypeSchema11 = nt.into();
serde_json::to_vec(&schema11)
@ -96,144 +91,125 @@ impl NotetypesService for Backend {
}
fn get_notetype(
&self,
&mut self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::notetypes::Notetype> {
) -> error::Result<anki_proto::notetypes::Notetype> {
let ntid = input.into();
self.with_col(|col| {
col.storage
.get_notetype(ntid)?
.or_not_found(ntid)
.map(Into::into)
})
}
fn get_notetype_legacy(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<generic::Json> {
let ntid = input.into();
self.with_col(|col| {
let schema11: NotetypeSchema11 =
col.storage.get_notetype(ntid)?.or_not_found(ntid)?.into();
Ok(serde_json::to_vec(&schema11)?).map(Into::into)
})
}
fn get_notetype_names(
&self,
_input: generic::Empty,
) -> Result<anki_proto::notetypes::NotetypeNames> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_all_notetype_names()?
.into_iter()
.map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name })
.collect();
Ok(anki_proto::notetypes::NotetypeNames { entries })
})
}
fn get_notetype_names_and_counts(
&self,
_input: generic::Empty,
) -> Result<anki_proto::notetypes::NotetypeUseCounts> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_notetype_use_counts()?
.into_iter()
.map(
|(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount {
id: id.0,
name,
use_count,
},
)
.collect();
Ok(anki_proto::notetypes::NotetypeUseCounts { entries })
})
}
fn get_notetype_id_by_name(
&self,
input: generic::String,
) -> Result<anki_proto::notetypes::NotetypeId> {
self.with_col(|col| {
col.storage
.get_notetype_id(&input.val)
.and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 })
})
}
fn remove_notetype(
&self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.remove_notetype(input.into()))
self.storage
.get_notetype(ntid)?
.or_not_found(ntid)
.map(Into::into)
}
fn get_notetype_legacy(
&mut self,
input: anki_proto::notetypes::NotetypeId,
) -> error::Result<generic::Json> {
let ntid = input.into();
let schema11: NotetypeSchema11 =
self.storage.get_notetype(ntid)?.or_not_found(ntid)?.into();
Ok(serde_json::to_vec(&schema11)?).map(Into::into)
}
fn get_notetype_names(&mut self) -> error::Result<anki_proto::notetypes::NotetypeNames> {
let entries: Vec<_> = self
.storage
.get_all_notetype_names()?
.into_iter()
.map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name })
.collect();
Ok(anki_proto::notetypes::NotetypeNames { entries })
}
fn get_notetype_names_and_counts(
&mut self,
) -> error::Result<anki_proto::notetypes::NotetypeUseCounts> {
let entries: Vec<_> = self
.storage
.get_notetype_use_counts()?
.into_iter()
.map(
|(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount {
id: id.0,
name,
use_count,
},
)
.collect();
Ok(anki_proto::notetypes::NotetypeUseCounts { entries })
}
fn get_notetype_id_by_name(
&mut self,
input: generic::String,
) -> error::Result<anki_proto::notetypes::NotetypeId> {
self.storage
.get_notetype_id(&input.val)
.and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 })
}
fn remove_notetype(
&mut self,
input: anki_proto::notetypes::NotetypeId,
) -> error::Result<anki_proto::collection::OpChanges> {
self.remove_notetype(input.into()).map(Into::into)
}
fn get_aux_notetype_config_key(
&self,
&mut self,
input: anki_proto::notetypes::GetAuxConfigKeyRequest,
) -> Result<generic::String> {
) -> error::Result<generic::String> {
Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into())
}
fn get_aux_template_config_key(
&self,
&mut self,
input: anki_proto::notetypes::GetAuxTemplateConfigKeyRequest,
) -> Result<generic::String> {
self.with_col(|col| {
col.get_aux_template_config_key(
input.notetype_id.into(),
input.card_ordinal as usize,
&input.key,
)
.map(Into::into)
})
) -> error::Result<generic::String> {
self.get_aux_template_config_key(
input.notetype_id.into(),
input.card_ordinal as usize,
&input.key,
)
.map(Into::into)
}
fn get_change_notetype_info(
&self,
&mut self,
input: anki_proto::notetypes::GetChangeNotetypeInfoRequest,
) -> Result<anki_proto::notetypes::ChangeNotetypeInfo> {
self.with_col(|col| {
col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into)
})
) -> error::Result<anki_proto::notetypes::ChangeNotetypeInfo> {
self.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into)
}
fn change_notetype(
&self,
&mut self,
input: anki_proto::notetypes::ChangeNotetypeRequest,
) -> Result<anki_proto::collection::OpChanges> {
self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into))
) -> error::Result<anki_proto::collection::OpChanges> {
self.change_notetype_of_notes(input.into()).map(Into::into)
}
fn get_field_names(
&self,
&mut self,
input: anki_proto::notetypes::NotetypeId,
) -> Result<generic::StringList> {
self.with_col(|col| col.storage.get_field_names(input.into()))
.map(Into::into)
) -> error::Result<generic::StringList> {
self.storage.get_field_names(input.into()).map(Into::into)
}
fn restore_notetype_to_stock(
&self,
&mut self,
input: anki_proto::notetypes::RestoreNotetypeToStockRequest,
) -> Result<anki_proto::collection::OpChanges> {
) -> error::Result<anki_proto::collection::OpChanges> {
let force_kind = input.force_kind.and_then(StockKind::from_i32);
self.with_col(|col| {
col.restore_notetype_to_stock(
input.notetype_id.or_invalid("missing notetype id")?.into(),
force_kind,
)
.map(Into::into)
})
self.restore_notetype_to_stock(
input.notetype_id.or_invalid("missing notetype id")?.into(),
force_kind,
)
.map(Into::into)
}
}

View file

@ -14,6 +14,7 @@ mod learning;
pub mod new;
pub(crate) mod queue;
mod reviews;
mod service;
pub mod states;
pub mod timespan;
pub mod timing;

View file

@ -0,0 +1,250 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod answering;
mod states;
use anki_proto::generic;
use anki_proto::scheduler;
use anki_proto::scheduler::SchedulingStatesWithContext;
use anki_proto::scheduler::SetSchedulingStatesRequest;
use crate::prelude::*;
use crate::scheduler::new::NewCardDueOrder;
use crate::scheduler::states::CardState;
use crate::scheduler::states::SchedulingStates;
use crate::stats::studied_today;
/// Collection-level implementation of the generated scheduler service.
///
/// Thin adapters from protobuf requests to inherent `Collection`
/// methods; inherent methods shadow same-named trait methods, so the
/// delegating calls are not recursive.
impl crate::services::SchedulerService for Collection {
    /// This behaves like _updateCutoff() in older code - it also unburies at
    /// the start of a new day.
    fn sched_timing_today(&mut self) -> Result<scheduler::SchedTimingTodayResponse> {
        let timing = self.timing_today()?;
        self.unbury_if_day_rolled_over(timing)?;
        Ok(timing.into())
    }

    /// Fetch data from DB and return rendered string.
    fn studied_today(&mut self) -> Result<generic::String> {
        self.studied_today().map(Into::into)
    }

    /// Message rendering only, for old graphs.
    fn studied_today_message(
        &mut self,
        input: scheduler::StudiedTodayMessageRequest,
    ) -> Result<generic::String> {
        Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
    }

    /// Update deck study statistics inside a non-undoable transaction.
    fn update_stats(&mut self, input: scheduler::UpdateStatsRequest) -> Result<()> {
        self.transact_no_undo(|col| {
            let today = col.current_due_day(0)?;
            let usn = col.usn()?;
            col.update_deck_stats(today, usn, input).map(Into::into)
        })
    }

    /// Raise today's new/review limits for a deck by the given deltas.
    fn extend_limits(&mut self, input: scheduler::ExtendLimitsRequest) -> Result<()> {
        self.transact_no_undo(|col| {
            let today = col.current_due_day(0)?;
            let usn = col.usn()?;
            col.extend_limits(
                today,
                usn,
                input.deck_id.into(),
                input.new_delta,
                input.review_delta,
            )
            .map(Into::into)
        })
    }

    /// Today's new/learn/review counts for a single deck.
    fn counts_for_deck_today(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> Result<scheduler::CountsForDeckTodayResponse> {
        self.counts_for_deck_today(input.did.into())
    }

    /// Info for the congratulations screen shown when a deck is done.
    fn congrats_info(&mut self) -> Result<scheduler::CongratsInfoResponse> {
        self.congrats_info()
    }

    /// Unbury or unsuspend the given cards.
    fn restore_buried_and_suspended_cards(
        &mut self,
        input: anki_proto::cards::CardIds,
    ) -> Result<anki_proto::collection::OpChanges> {
        let cids: Vec<_> = input.cids.into_iter().map(CardId).collect();
        self.unbury_or_unsuspend_cards(&cids).map(Into::into)
    }

    /// Unbury cards in a deck, filtered by the requested bury mode.
    fn unbury_deck(
        &mut self,
        input: scheduler::UnburyDeckRequest,
    ) -> Result<anki_proto::collection::OpChanges> {
        self.unbury_deck(input.deck_id.into(), input.mode())
            .map(Into::into)
    }

    /// Bury or suspend the given cards; when no card ids are supplied,
    /// operate on all cards of the supplied note ids instead.
    fn bury_or_suspend_cards(
        &mut self,
        input: scheduler::BuryOrSuspendCardsRequest,
    ) -> Result<anki_proto::collection::OpChangesWithCount> {
        let mode = input.mode();
        let cids = if input.card_ids.is_empty() {
            self.storage
                .card_ids_of_notes(&input.note_ids.into_newtype(NoteId))?
        } else {
            input.card_ids.into_newtype(CardId)
        };
        self.bury_or_suspend_cards(&cids, mode).map(Into::into)
    }

    /// Return all cards in a filtered deck to their home decks.
    fn empty_filtered_deck(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> Result<anki_proto::collection::OpChanges> {
        self.empty_filtered_deck(input.did.into()).map(Into::into)
    }

    /// Re-run a filtered deck's search and repopulate it.
    fn rebuild_filtered_deck(
        &mut self,
        input: anki_proto::decks::DeckId,
    ) -> Result<anki_proto::collection::OpChangesWithCount> {
        self.rebuild_filtered_deck(input.did.into()).map(Into::into)
    }

    /// Reset the given cards to the new-card queue, with optional
    /// position restore and count reset.
    fn schedule_cards_as_new(
        &mut self,
        input: scheduler::ScheduleCardsAsNewRequest,
    ) -> Result<anki_proto::collection::OpChanges> {
        let cids = input.card_ids.into_newtype(CardId);
        self.reschedule_cards_as_new(
            &cids,
            input.log,
            input.restore_position,
            input.reset_counts,
            input
                .context
                .and_then(scheduler::schedule_cards_as_new_request::Context::from_i32),
        )
        .map(Into::into)
    }

    /// Default option values for the "forget card" dialog in the given
    /// context.
    fn schedule_cards_as_new_defaults(
        &mut self,
        input: scheduler::ScheduleCardsAsNewDefaultsRequest,
    ) -> Result<scheduler::ScheduleCardsAsNewDefaultsResponse> {
        // Fully-qualified call avoids resolving to this trait method.
        Ok(Collection::reschedule_cards_as_new_defaults(
            self,
            input.context(),
        ))
    }

    /// Set an explicit due date (or range) on the given cards.
    fn set_due_date(
        &mut self,
        input: scheduler::SetDueDateRequest,
    ) -> Result<anki_proto::collection::OpChanges> {
        let config = input.config_key.map(|v| v.key().into());
        let days = input.days;
        let cids = input.card_ids.into_newtype(CardId);
        self.set_due_date(&cids, &days, config).map(Into::into)
    }

    /// Reposition new cards, either randomized or preserving their
    /// current relative order.
    fn sort_cards(
        &mut self,
        input: scheduler::SortCardsRequest,
    ) -> Result<anki_proto::collection::OpChangesWithCount> {
        let cids = input.card_ids.into_newtype(CardId);
        let (start, step, random, shift) = (
            input.starting_from,
            input.step_size,
            input.randomize,
            input.shift_existing,
        );
        let order = if random {
            NewCardDueOrder::Random
        } else {
            NewCardDueOrder::Preserve
        };
        self.sort_cards(&cids, start, step, order, shift)
            .map(Into::into)
    }

    /// Default option values for the reposition dialog.
    fn reposition_defaults(&mut self) -> Result<scheduler::RepositionDefaultsResponse> {
        Ok(Collection::reposition_defaults(self))
    }

    /// Legacy whole-deck sort of new cards.
    fn sort_deck(
        &mut self,
        input: scheduler::SortDeckRequest,
    ) -> Result<anki_proto::collection::OpChangesWithCount> {
        self.sort_deck_legacy(input.deck_id.into(), input.randomize)
            .map(Into::into)
    }

    /// The next states a card could enter for each answer button.
    fn get_scheduling_states(
        &mut self,
        input: anki_proto::cards::CardId,
    ) -> Result<scheduler::SchedulingStates> {
        let cid: CardId = input.into();
        self.get_scheduling_states(cid).map(Into::into)
    }

    /// Human-readable descriptions (e.g. intervals) of the given states.
    fn describe_next_states(
        &mut self,
        input: scheduler::SchedulingStates,
    ) -> Result<generic::StringList> {
        let states: SchedulingStates = input.into();
        self.describe_next_states(states).map(Into::into)
    }

    /// Whether entering the given state would mark the card as a leech.
    fn state_is_leech(&mut self, input: scheduler::SchedulingState) -> Result<generic::Bool> {
        let state: CardState = input.into();
        Ok(state.leeched().into())
    }

    /// Record an answer to a card.
    fn answer_card(
        &mut self,
        input: scheduler::CardAnswer,
    ) -> Result<anki_proto::collection::OpChanges> {
        self.answer_card(&mut input.into()).map(Into::into)
    }

    /// Migrate the collection to the v2 scheduler (non-undoable).
    fn upgrade_scheduler(&mut self) -> Result<()> {
        self.transact_no_undo(|col| col.upgrade_to_v2_scheduler())
            .map(Into::into)
    }

    /// Fetch the next batch of cards to study.
    fn get_queued_cards(
        &mut self,
        input: scheduler::GetQueuedCardsRequest,
    ) -> Result<scheduler::QueuedCards> {
        self.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
            .map(Into::into)
    }

    /// Create a custom study session from the dialog's request.
    fn custom_study(
        &mut self,
        input: scheduler::CustomStudyRequest,
    ) -> Result<anki_proto::collection::OpChanges> {
        self.custom_study(input).map(Into::into)
    }

    /// Default option values for the custom study dialog.
    fn custom_study_defaults(
        &mut self,
        input: scheduler::CustomStudyDefaultsRequest,
    ) -> Result<scheduler::CustomStudyDefaultsResponse> {
        self.custom_study_defaults(input.deck_id.into())
    }

    // The two methods below exist for custom scheduling in the
    // frontend; calling them on the Rust side is a usage error.

    fn get_scheduling_states_with_context(&mut self) -> Result<SchedulingStatesWithContext> {
        invalid_input!("the frontend should implement this")
    }

    fn set_scheduling_states(&mut self, _input: SetSchedulingStatesRequest) -> Result<()> {
        invalid_input!("the frontend should implement this")
    }
}

View file

@ -3,6 +3,7 @@
mod builder;
mod parser;
mod service;
mod sqlwriter;
pub(crate) mod writer;

View file

@ -8,24 +8,20 @@ use std::str::FromStr;
use std::sync::Arc;
use anki_proto::generic;
pub(super) use anki_proto::search::search_service::Service as SearchService;
use anki_proto::search::sort_order::Value as SortOrderProto;
use super::notes::to_note_ids;
use super::Backend;
use crate::backend::search::browser_table::string_list_to_browser_columns;
use crate::browser_table::Column;
use crate::notes::service::to_note_ids;
use crate::prelude::*;
use crate::search::replace_search_node;
use crate::search::service::browser_table::string_list_to_browser_columns;
use crate::search::JoinSearches;
use crate::search::Node;
use crate::search::SortMode;
impl SearchService for Backend {
type Error = AnkiError;
impl crate::services::SearchService for Collection {
fn build_search_string(
&self,
&mut self,
input: anki_proto::search::SearchNode,
) -> Result<generic::String> {
let node: Node = input.try_into()?;
@ -33,33 +29,29 @@ impl SearchService for Backend {
}
fn search_cards(
&self,
&mut self,
input: anki_proto::search::SearchRequest,
) -> Result<anki_proto::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let cids = col.search_cards(&input.search, order)?;
Ok(anki_proto::search::SearchResponse {
ids: cids.into_iter().map(|v| v.0).collect(),
})
let order = input.order.unwrap_or_default().value.into();
let cids = self.search_cards(&input.search, order)?;
Ok(anki_proto::search::SearchResponse {
ids: cids.into_iter().map(|v| v.0).collect(),
})
}
fn search_notes(
&self,
&mut self,
input: anki_proto::search::SearchRequest,
) -> Result<anki_proto::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let nids = col.search_notes(&input.search, order)?;
Ok(anki_proto::search::SearchResponse {
ids: nids.into_iter().map(|v| v.0).collect(),
})
let order = input.order.unwrap_or_default().value.into();
let nids = self.search_notes(&input.search, order)?;
Ok(anki_proto::search::SearchResponse {
ids: nids.into_iter().map(|v| v.0).collect(),
})
}
fn join_search_nodes(
&self,
&mut self,
input: anki_proto::search::JoinSearchNodesRequest,
) -> Result<generic::String> {
let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?;
@ -82,7 +74,7 @@ impl SearchService for Backend {
}
fn replace_search_node(
&self,
&mut self,
input: anki_proto::search::ReplaceSearchNodeRequest,
) -> Result<generic::String> {
let existing = {
@ -98,7 +90,7 @@ impl SearchService for Backend {
}
fn find_and_replace(
&self,
&mut self,
input: anki_proto::search::FindAndReplaceRequest,
) -> Result<anki_proto::collection::OpChangesWithCount> {
let mut search = if input.regex {
@ -116,33 +108,28 @@ impl SearchService for Backend {
Some(input.field_name)
};
let repl = input.replacement;
self.with_col(|col| {
if nids.is_empty() {
nids = col.search_notes_unordered("")?
};
col.find_and_replace(nids, &search, &repl, field_name)
.map(Into::into)
})
if nids.is_empty() {
nids = self.search_notes_unordered("")?
};
self.find_and_replace(nids, &search, &repl, field_name)
.map(Into::into)
}
fn all_browser_columns(
&self,
_input: generic::Empty,
) -> Result<anki_proto::search::BrowserColumns> {
self.with_col(|col| Ok(col.all_browser_columns()))
fn all_browser_columns(&mut self) -> Result<anki_proto::search::BrowserColumns> {
Ok(Collection::all_browser_columns(self))
}
fn set_active_browser_columns(&self, input: generic::StringList) -> Result<generic::Empty> {
self.with_col(|col| {
col.state.active_browser_columns =
Some(Arc::new(string_list_to_browser_columns(input)));
Ok(())
})
.map(Into::into)
fn set_active_browser_columns(&mut self, input: generic::StringList) -> Result<()> {
self.state.active_browser_columns = Some(Arc::new(string_list_to_browser_columns(input)));
Ok(()).map(Into::into)
}
fn browser_row_for_id(&self, input: generic::Int64) -> Result<anki_proto::search::BrowserRow> {
self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into))
fn browser_row_for_id(
&mut self,
input: generic::Int64,
) -> Result<anki_proto::search::BrowserRow> {
self.browser_row_for_id(input.val).map(Into::into)
}
}

6
rslib/src/services.rs Normal file
View file

@ -0,0 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

// Includes the automatically-generated *Service and Backend*Service traits,
// and some impls on Backend and Collection.
// The file is produced at build time by the service codegen; OUT_DIR is
// set by Cargo for the build script's output.
include!(concat!(env!("OUT_DIR"), "/backend.rs"));

View file

@ -3,6 +3,7 @@
mod card;
mod graphs;
mod service;
mod today;
pub use today::studied_today;

View file

@ -1,42 +1,33 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub(super) use anki_proto::stats::stats_service::Service as StatsService;
use super::Backend;
use crate::prelude::*;
use crate::collection::Collection;
use crate::error;
use crate::revlog::RevlogReviewKind;
impl StatsService for Backend {
type Error = AnkiError;
impl crate::services::StatsService for Collection {
fn card_stats(
&self,
&mut self,
input: anki_proto::cards::CardId,
) -> Result<anki_proto::stats::CardStatsResponse> {
self.with_col(|col| col.card_stats(input.cid.into()))
) -> error::Result<anki_proto::stats::CardStatsResponse> {
self.card_stats(input.cid.into())
}
fn graphs(
&self,
&mut self,
input: anki_proto::stats::GraphsRequest,
) -> Result<anki_proto::stats::GraphsResponse> {
self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
) -> error::Result<anki_proto::stats::GraphsResponse> {
self.graph_data_for_search(&input.search, input.days)
}
fn get_graph_preferences(
&self,
_input: anki_proto::generic::Empty,
) -> Result<anki_proto::stats::GraphPreferences> {
self.with_col(|col| Ok(col.get_graph_preferences()))
fn get_graph_preferences(&mut self) -> error::Result<anki_proto::stats::GraphPreferences> {
Ok(Collection::get_graph_preferences(self))
}
fn set_graph_preferences(
&self,
&mut self,
input: anki_proto::stats::GraphPreferences,
) -> Result<anki_proto::generic::Empty> {
self.with_col(|col| col.set_graph_preferences(input))
.map(Into::into)
) -> error::Result<()> {
self.set_graph_preferences(input).map(Into::into)
}
}

View file

@ -10,6 +10,7 @@ mod register;
mod remove;
mod rename;
mod reparent;
mod service;
mod tree;
pub(crate) mod undo;

107
rslib/src/tags/service.rs Normal file
View file

@ -0,0 +1,107 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::generic;
use crate::collection::Collection;
use crate::error;
use crate::notes::service::to_note_ids;
impl crate::services::TagsService for Collection {
    /// Remove tags that are no longer referenced by any note.
    fn clear_unused_tags(&mut self) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        // `self.clear_unused_tags()` resolves to the inherent Collection
        // method (inherent impls take precedence over this trait method).
        self.clear_unused_tags().map(Into::into)
    }

    /// Return the names of all tags in the collection.
    fn all_tags(&mut self) -> error::Result<generic::StringList> {
        let vals = self
            .storage
            .all_tags()?
            .into_iter()
            .map(|tag| tag.name)
            .collect();
        Ok(generic::StringList { vals })
    }

    /// Remove the given space-separated tags from all notes.
    fn remove_tags(
        &mut self,
        tags: generic::String,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let tag_str = tags.val;
        self.remove_tags(tag_str.as_str()).map(Into::into)
    }

    /// Persist the collapsed/expanded state of a tag in the sidebar.
    fn set_tag_collapsed(
        &mut self,
        input: anki_proto::tags::SetTagCollapsedRequest,
    ) -> error::Result<anki_proto::collection::OpChanges> {
        self.set_tag_collapsed(input.name.as_str(), input.collapsed)
            .map(Into::into)
    }

    /// Build the hierarchical tag tree for display.
    fn tag_tree(&mut self) -> error::Result<anki_proto::tags::TagTreeNode> {
        self.tag_tree()
    }

    /// Move the given tags under a new parent; an empty parent means
    /// reparenting to the top level.
    fn reparent_tags(
        &mut self,
        input: anki_proto::tags::ReparentTagsRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let new_parent = input.new_parent;
        let target_tag = if new_parent.is_empty() {
            None
        } else {
            Some(new_parent)
        };
        self.reparent_tags(&input.tags, target_tag).map(Into::into)
    }

    /// Rename tags matching the current prefix to use the new prefix.
    fn rename_tags(
        &mut self,
        input: anki_proto::tags::RenameTagsRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        self.rename_tag(input.current_prefix.as_str(), input.new_prefix.as_str())
            .map(Into::into)
    }

    /// Add the given tags to the listed notes.
    fn add_note_tags(
        &mut self,
        input: anki_proto::tags::NoteIdsAndTagsRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let note_ids = to_note_ids(input.note_ids);
        self.add_tags_to_notes(&note_ids, &input.tags)
            .map(Into::into)
    }

    /// Remove the given tags from the listed notes.
    fn remove_note_tags(
        &mut self,
        input: anki_proto::tags::NoteIdsAndTagsRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let note_ids = to_note_ids(input.note_ids);
        self.remove_tags_from_notes(&note_ids, &input.tags)
            .map(Into::into)
    }

    /// Find+replace over tags; with no explicit note ids, every note in
    /// the collection is searched.
    fn find_and_replace_tag(
        &mut self,
        input: anki_proto::tags::FindAndReplaceTagRequest,
    ) -> error::Result<anki_proto::collection::OpChangesWithCount> {
        let note_ids = match input.note_ids.is_empty() {
            // Empty search string matches all notes.
            true => self.search_notes_unordered("")?,
            false => to_note_ids(input.note_ids),
        };
        self.find_and_replace_tag(
            &note_ids,
            &input.search,
            &input.replacement,
            input.regex,
            input.match_case,
        )
        .map(Into::into)
    }

    /// Suggest tag completions for partial input, up to the given limit.
    fn complete_tag(
        &mut self,
        input: anki_proto::tags::CompleteTagRequest,
    ) -> error::Result<anki_proto::tags::CompleteTagResponse> {
        // Fully qualified to make explicit that the inherent method is
        // intended, not this trait method.
        let tags = Collection::complete_tag(self, input.input.as_str(), input.match_limit as usize)?;
        Ok(anki_proto::tags::CompleteTagResponse { tags })
    }
}