From 553303fc125df51ab12089a61c16f56d52fec30a Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Mon, 19 Jun 2023 15:33:40 +1000 Subject: [PATCH] Refactor service generation (#2552) * Automatically elide empty inputs and outputs to backend methods * Refactor service generation Despite the fact that the majority of our Protobuf service methods require an open collection, they were not accessible with just a Collection object. To access the methods (e.g. because we haven't gotten around to exposing the correct API in Collection yet), you had to wrap the collection in a Backend object, and pay a mutex-acquisition cost for each call, even if you have exclusive access to the object. This commit migrates the majority of service methods to the Collection, so they can now be used directly, and improves the ergonomics a bit at the same time. The approach taken: - The service generation now happens in rslib instead of anki_proto, which avoids the need for trait constraints and associated types. - Service methods are assumed to be collection-based by default. Instead of implementing the service on Backend, we now implement it on Collection, which means our methods no longer need to use self.with_col(...). - We automatically generate methods in Backend which use self.with_col() to delegate to the Collection method. - For methods that are only appropriate for the backend, we add a flag in the .proto file. The codegen uses this flag to write the method into a BackendFooService instead of FooService, which the backend implements. - The flag also allows us to define separate implementations for collection and backend, so we can e.g. skip the collection mutex in the i18n service while also providing the service on a collection. 
--- Cargo.lock | 55 +-- cargo/licenses.json | 15 +- proto/anki/ankidroid.proto | 23 +- proto/anki/card_rendering.proto | 6 +- proto/anki/codegen.proto | 34 ++ proto/anki/collection.proto | 17 +- proto/anki/i18n.proto | 13 +- proto/anki/import_export.proto | 9 +- proto/anki/media.proto | 2 +- proto/anki/sync.proto | 34 +- pylib/rsbridge/lib.rs | 2 +- rslib/Cargo.toml | 12 +- rslib/build.rs | 19 +- rslib/proto/build.rs | 4 +- rslib/proto/rust.rs | 204 ----------- rslib/proto/rust_protos.rs | 95 +++++ rslib/proto/src/generic_helpers.rs | 6 - rslib/proto/src/lib.rs | 18 +- rslib/rust_interface.rs | 319 +++++++++++++++++ rslib/src/{backend => }/ankidroid/db.rs | 4 +- rslib/src/{backend => }/ankidroid/error.rs | 2 +- rslib/src/ankidroid/mod.rs | 5 + rslib/src/ankidroid/service.rs | 70 ++++ rslib/src/backend/ankidroid.rs | 61 ++++ rslib/src/backend/ankidroid/mod.rs | 131 ------- rslib/src/backend/card_rendering.rs | 16 + rslib/src/backend/collection.rs | 116 +++--- rslib/src/backend/config.rs | 112 +++--- rslib/src/backend/dbproxy.rs | 12 +- rslib/src/backend/decks.rs | 334 ------------------ rslib/src/backend/error.rs | 1 + rslib/src/backend/i18n.rs | 76 +--- rslib/src/backend/image_occlusion.rs | 67 ---- rslib/src/backend/import_export.rs | 127 +------ rslib/src/backend/links.rs | 20 -- rslib/src/backend/media.rs | 64 ---- rslib/src/backend/mod.rs | 86 +---- rslib/src/backend/notes.rs | 201 ----------- rslib/src/backend/scheduler/mod.rs | 284 --------------- rslib/src/backend/{sync/mod.rs => sync.rs} | 24 +- rslib/src/backend/tags.rs | 127 ------- rslib/src/card/mod.rs | 1 + .../src/{backend/card.rs => card/service.rs} | 103 +++--- rslib/src/card_rendering/mod.rs | 1 + .../service.rs} | 125 ++++--- rslib/src/deckconfig/mod.rs | 1 + .../deckconfig.rs => deckconfig/service.rs} | 87 +++-- rslib/src/decks/mod.rs | 1 + rslib/src/decks/service.rs | 317 +++++++++++++++++ rslib/src/error/mod.rs | 13 +- rslib/src/i18n/mod.rs | 3 + rslib/src/i18n/service.rs | 91 +++++ 
rslib/src/image_occlusion/mod.rs | 1 + rslib/src/image_occlusion/service.rs | 60 ++++ rslib/src/import_export/mod.rs | 1 + rslib/src/import_export/service.rs | 114 ++++++ rslib/src/lib.rs | 3 + rslib/src/links.rs | 12 + rslib/src/media/mod.rs | 1 + rslib/src/media/service.rs | 50 +++ rslib/src/notes/mod.rs | 1 + rslib/src/notes/service.rs | 188 ++++++++++ rslib/src/notetype/mod.rs | 1 + .../notetypes.rs => notetype/service.rs} | 294 +++++++-------- rslib/src/scheduler/mod.rs | 1 + .../service}/answering.rs | 0 rslib/src/scheduler/service/mod.rs | 250 +++++++++++++ .../service}/states/filtered.rs | 0 .../service}/states/learning.rs | 0 .../service}/states/mod.rs | 0 .../service}/states/new.rs | 0 .../service}/states/normal.rs | 0 .../service}/states/preview.rs | 0 .../service}/states/relearning.rs | 0 .../service}/states/rescheduling.rs | 0 .../service}/states/review.rs | 0 rslib/src/search/mod.rs | 1 + .../service}/browser_table.rs | 0 .../{backend/search => search/service}/mod.rs | 79 ++--- .../search => search/service}/search_node.rs | 0 rslib/src/services.rs | 6 + rslib/src/stats/mod.rs | 1 + .../{backend/stats.rs => stats/service.rs} | 37 +- rslib/src/tags/mod.rs | 1 + rslib/src/tags/service.rs | 107 ++++++ 85 files changed, 2444 insertions(+), 2335 deletions(-) create mode 100644 proto/anki/codegen.proto delete mode 100644 rslib/proto/rust.rs create mode 100644 rslib/proto/rust_protos.rs create mode 100644 rslib/rust_interface.rs rename rslib/src/{backend => }/ankidroid/db.rs (99%) rename rslib/src/{backend => }/ankidroid/error.rs (98%) create mode 100644 rslib/src/ankidroid/mod.rs create mode 100644 rslib/src/ankidroid/service.rs create mode 100644 rslib/src/backend/ankidroid.rs delete mode 100644 rslib/src/backend/ankidroid/mod.rs create mode 100644 rslib/src/backend/card_rendering.rs delete mode 100644 rslib/src/backend/decks.rs delete mode 100644 rslib/src/backend/image_occlusion.rs delete mode 100644 rslib/src/backend/links.rs delete mode 100644 
rslib/src/backend/media.rs delete mode 100644 rslib/src/backend/notes.rs delete mode 100644 rslib/src/backend/scheduler/mod.rs rename rslib/src/backend/{sync/mod.rs => sync.rs} (96%) delete mode 100644 rslib/src/backend/tags.rs rename rslib/src/{backend/card.rs => card/service.rs} (58%) rename rslib/src/{backend/cardrendering.rs => card_rendering/service.rs} (71%) rename rslib/src/{backend/deckconfig.rs => deckconfig/service.rs} (58%) create mode 100644 rslib/src/decks/service.rs create mode 100644 rslib/src/i18n/mod.rs create mode 100644 rslib/src/i18n/service.rs create mode 100644 rslib/src/image_occlusion/service.rs create mode 100644 rslib/src/import_export/service.rs create mode 100644 rslib/src/media/service.rs create mode 100644 rslib/src/notes/service.rs rename rslib/src/{backend/notetypes.rs => notetype/service.rs} (53%) rename rslib/src/{backend/scheduler => scheduler/service}/answering.rs (100%) create mode 100644 rslib/src/scheduler/service/mod.rs rename rslib/src/{backend/scheduler => scheduler/service}/states/filtered.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/learning.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/mod.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/new.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/normal.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/preview.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/relearning.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/rescheduling.rs (100%) rename rslib/src/{backend/scheduler => scheduler/service}/states/review.rs (100%) rename rslib/src/{backend/search => search/service}/browser_table.rs (100%) rename rslib/src/{backend/search => search/service}/mod.rs (65%) rename rslib/src/{backend/search => search/service}/search_node.rs (100%) create mode 100644 rslib/src/services.rs rename 
rslib/src/{backend/stats.rs => stats/service.rs} (55%) create mode 100644 rslib/src/tags/service.rs diff --git a/Cargo.lock b/Cargo.lock index bc84daabd..0a19ba876 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -123,9 +123,10 @@ dependencies = [ "num_enum", "once_cell", "percent-encoding-iri", + "phf 0.11.1", "pin-project", + "prettyplease 0.2.7", "prost", - "prost-build", "prost-reflect", "prost-types", "pulldown-cmark 0.9.2", @@ -142,6 +143,7 @@ dependencies = [ "sha1", "snafu", "strum", + "syn 2.0.18", "tempfile", "tokio", "tokio-util", @@ -153,7 +155,6 @@ dependencies = [ "unicase", "unicode-normalization", "utime", - "which", "windows", "wiremock", "workspace-hack", @@ -378,7 +379,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -469,7 +470,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -762,7 +763,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -1035,7 +1036,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -1052,7 +1053,7 @@ checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -1455,7 +1456,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -2541,7 +2542,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -2767,7 +2768,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -2973,6 +2974,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "prettyplease" +version = "0.2.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "43ded2b5b204571f065ab8540367d738dfe1b3606ab9eb669dcfb5e7a3a07501" +dependencies = [ + "proc-macro2", + "syn 2.0.18", +] + [[package]] name = "proc-macro-crate" version = "1.3.1" @@ -2991,9 +3002,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.54" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e472a104799c74b514a57226160104aa483546de37e839ec50e3c2e41dd87534" +checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" dependencies = [ "unicode-ident", ] @@ -3021,7 +3032,7 @@ dependencies = [ "log", "multimap", "petgraph", - "prettyplease", + "prettyplease 0.1.25", "prost", "prost-types", "regex", @@ -3158,9 +3169,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.26" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ "proc-macro2", ] @@ -3615,7 +3626,7 @@ checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -3657,7 +3668,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -3900,9 +3911,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.12" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79d9531f94112cfc3e4c8f5f02cb2b58f72c97b7efd85f70203cc6d8efda5927" +checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" dependencies = [ "proc-macro2", "quote", @@ -3999,7 +4010,7 @@ checksum = 
"f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -4100,7 +4111,7 @@ checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.12", + "syn 2.0.18", ] [[package]] @@ -5072,7 +5083,7 @@ dependencies = [ "snafu", "snafu-derive", "syn 1.0.109", - "syn 2.0.12", + "syn 2.0.18", "time", "tokio", "tokio-util", diff --git a/cargo/licenses.json b/cargo/licenses.json index c70571861..dcd03d843 100644 --- a/cargo/licenses.json +++ b/cargo/licenses.json @@ -1961,6 +1961,15 @@ "license_file": null, "description": "A minimal `syn` syntax tree pretty-printer" }, + { + "name": "prettyplease", + "version": "0.2.7", + "authors": "David Tolnay ", + "repository": "https://github.com/dtolnay/prettyplease", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "A minimal `syn` syntax tree pretty-printer" + }, { "name": "proc-macro-crate", "version": "1.3.1", @@ -1981,7 +1990,7 @@ }, { "name": "proc-macro2", - "version": "1.0.54", + "version": "1.0.60", "authors": "David Tolnay |Alex Crichton ", "repository": "https://github.com/dtolnay/proc-macro2", "license": "Apache-2.0 OR MIT", @@ -2044,7 +2053,7 @@ }, { "name": "quote", - "version": "1.0.26", + "version": "1.0.28", "authors": "David Tolnay ", "repository": "https://github.com/dtolnay/quote", "license": "Apache-2.0 OR MIT", @@ -2602,7 +2611,7 @@ }, { "name": "syn", - "version": "2.0.12", + "version": "2.0.18", "authors": "David Tolnay ", "repository": "https://github.com/dtolnay/syn", "license": "Apache-2.0 OR MIT", diff --git a/proto/anki/ankidroid.proto b/proto/anki/ankidroid.proto index 0544eb5e7..38c2b354b 100644 --- a/proto/anki/ankidroid.proto +++ b/proto/anki/ankidroid.proto @@ -4,25 +4,34 @@ option java_multiple_files = true; import "anki/generic.proto"; import "anki/scheduler.proto"; +import "anki/codegen.proto"; package 
anki.ankidroid; service AnkidroidService { rpc SchedTimingTodayLegacy(SchedTimingTodayLegacyRequest) - returns (scheduler.SchedTimingTodayResponse); - rpc LocalMinutesWestLegacy(generic.Int64) returns (generic.Int32); + returns (scheduler.SchedTimingTodayResponse) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc LocalMinutesWestLegacy(generic.Int64) returns (generic.Int32) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } rpc RunDbCommand(generic.Json) returns (generic.Json); - rpc RunDbCommandProto(generic.Json) returns (DBResponse); + rpc RunDbCommandProto(generic.Json) returns (DbResponse); rpc InsertForId(generic.Json) returns (generic.Int64); rpc RunDbCommandForRowCount(generic.Json) returns (generic.Int64); rpc FlushAllQueries(generic.Empty) returns (generic.Empty); rpc FlushQuery(generic.Int32) returns (generic.Empty); - rpc GetNextResultPage(GetNextResultPageRequest) returns (DBResponse); - rpc SetPageSize(generic.Int64) returns (generic.Empty); + rpc GetNextResultPage(GetNextResultPageRequest) returns (DbResponse); + rpc SetPageSize(generic.Int64) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } rpc GetColumnNamesFromQuery(generic.String) returns (generic.StringList); rpc GetActiveSequenceNumbers(generic.Empty) returns (GetActiveSequenceNumbersResponse); - rpc DebugProduceError(generic.String) returns (generic.Empty); + rpc DebugProduceError(generic.String) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } } message DebugActiveDatabaseSequenceNumbersResponse { @@ -57,7 +66,7 @@ message DbResult { repeated Row rows = 1; } -message DBResponse { +message DbResponse { DbResult result = 1; int32 sequenceNumber = 2; int32 rowCount = 3; diff --git a/proto/anki/card_rendering.proto b/proto/anki/card_rendering.proto index b0ecd71ca..ae5886c46 100644 --- a/proto/anki/card_rendering.proto +++ b/proto/anki/card_rendering.proto @@ -10,6 +10,7 @@ 
package anki.card_rendering; import "anki/generic.proto"; import "anki/notes.proto"; import "anki/notetypes.proto"; +import "anki/codegen.proto"; service CardRenderingService { rpc ExtractAvTags(ExtractAvTagsRequest) returns (ExtractAvTagsResponse); @@ -25,7 +26,10 @@ service CardRenderingService { rpc RenderMarkdown(RenderMarkdownRequest) returns (generic.String); rpc EncodeIriPaths(generic.String) returns (generic.String); rpc DecodeIriPaths(generic.String) returns (generic.String); - rpc StripHtml(StripHtmlRequest) returns (generic.String); + rpc StripHtml(StripHtmlRequest) returns (generic.String) { + // a bunch of our unit tests access this without a collection + option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND; + } rpc CompareAnswer(CompareAnswerRequest) returns (generic.String); rpc ExtractClozeForTyping(ExtractClozeForTypingRequest) returns (generic.String); diff --git a/proto/anki/codegen.proto b/proto/anki/codegen.proto new file mode 100644 index 000000000..a25591d3c --- /dev/null +++ b/proto/anki/codegen.proto @@ -0,0 +1,34 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +syntax = "proto3"; + +package anki.codegen; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.MethodOptions { + RustMethods rust_methods = 50000; +} + +message MethodOptions { + RustMethods rust_methods = 50000; +} + +enum RustMethods { + /// Used for typical collection-based operations. We must implement the + // method on Collection. The same method is automatically implemented on + // Backend, which forwards to Collection. + RUST_METHODS_COLLECTION_AND_AUTO_BACKEND = 0; + /// Method only makes sense on the backend (eg one that closes and reopens + /// the collection). Backend method needs to be implemented. + RUST_METHODS_BACKEND_ONLY = 1; + /// Both the backend and collection need to implement the method; there + /// is no auto-delegation. 
Can be used to provide a method on both, but + /// skip the Collection mutex lock when a backend handle is available. + /// In practice we only do this for the i18n methods; for the occasional + /// method in other services that doesn't happen to need the collection, + /// we just delegate to the collection method for convenience, and to make + /// sure it's available even if the consumer is not using Backend. + RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND = 2; +} diff --git a/proto/anki/collection.proto b/proto/anki/collection.proto index ff53f881c..475be93af 100644 --- a/proto/anki/collection.proto +++ b/proto/anki/collection.proto @@ -8,10 +8,15 @@ option java_multiple_files = true; package anki.collection; import "anki/generic.proto"; +import "anki/codegen.proto"; service CollectionService { - rpc OpenCollection(OpenCollectionRequest) returns (generic.Empty); - rpc CloseCollection(CloseCollectionRequest) returns (generic.Empty); + rpc OpenCollection(OpenCollectionRequest) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc CloseCollection(CloseCollectionRequest) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } rpc CheckDatabase(generic.Empty) returns (CheckDatabaseResponse); rpc GetUndoStatus(generic.Empty) returns (UndoStatus); rpc Undo(generic.Empty) returns (OpChangesAfterUndo); @@ -24,10 +29,14 @@ service CollectionService { // transaction. Unlike a collection export, does not require reopening the DB, // as there is no downgrade step. // Returns false if it's not time to make a backup yet. - rpc CreateBackup(CreateBackupRequest) returns (generic.Bool); + rpc CreateBackup(CreateBackupRequest) returns (generic.Bool) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } // If a backup is running, wait for it to complete. Will return an error // if the backup encountered an error. 
- rpc AwaitBackupCompletion(generic.Empty) returns (generic.Empty); + rpc AwaitBackupCompletion(generic.Empty) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } } message OpenCollectionRequest { diff --git a/proto/anki/i18n.proto b/proto/anki/i18n.proto index 1d5ca05ce..3330b190d 100644 --- a/proto/anki/i18n.proto +++ b/proto/anki/i18n.proto @@ -8,11 +8,18 @@ option java_multiple_files = true; package anki.i18n; import "anki/generic.proto"; +import "anki/codegen.proto"; service I18nService { - rpc TranslateString(TranslateStringRequest) returns (generic.String); - rpc FormatTimespan(FormatTimespanRequest) returns (generic.String); - rpc I18nResources(I18nResourcesRequest) returns (generic.Json); + rpc TranslateString(TranslateStringRequest) returns (generic.String) { + option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND; + } + rpc FormatTimespan(FormatTimespanRequest) returns (generic.String) { + option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND; + } + rpc I18nResources(I18nResourcesRequest) returns (generic.Json) { + option (codegen.rust_methods) = RUST_METHODS_COLLECTION_AND_MANUAL_BACKEND; + } } message TranslateStringRequest { diff --git a/proto/anki/import_export.proto b/proto/anki/import_export.proto index e418c5fe2..4d1d1d1a6 100644 --- a/proto/anki/import_export.proto +++ b/proto/anki/import_export.proto @@ -11,12 +11,17 @@ import "anki/cards.proto"; import "anki/collection.proto"; import "anki/notes.proto"; import "anki/generic.proto"; +import "anki/codegen.proto"; service ImportExportService { rpc ImportCollectionPackage(ImportCollectionPackageRequest) - returns (generic.Empty); + returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } rpc ExportCollectionPackage(ExportCollectionPackageRequest) - returns (generic.Empty); + returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } rpc 
ImportAnkiPackage(ImportAnkiPackageRequest) returns (ImportResponse); rpc ExportAnkiPackage(ExportAnkiPackageRequest) returns (generic.UInt32); rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata); diff --git a/proto/anki/media.proto b/proto/anki/media.proto index 8affeba64..47b0ffcea 100644 --- a/proto/anki/media.proto +++ b/proto/anki/media.proto @@ -11,8 +11,8 @@ import "anki/generic.proto"; service MediaService { rpc CheckMedia(generic.Empty) returns (CheckMediaResponse); - rpc TrashMediaFiles(TrashMediaFilesRequest) returns (generic.Empty); rpc AddMediaFile(AddMediaFileRequest) returns (generic.String); + rpc TrashMediaFiles(TrashMediaFilesRequest) returns (generic.Empty); rpc EmptyTrash(generic.Empty) returns (generic.Empty); rpc RestoreTrash(generic.Empty) returns (generic.Empty); } diff --git a/proto/anki/sync.proto b/proto/anki/sync.proto index b0c0404e7..546496480 100644 --- a/proto/anki/sync.proto +++ b/proto/anki/sync.proto @@ -8,17 +8,33 @@ option java_multiple_files = true; package anki.sync; import "anki/generic.proto"; +import "anki/codegen.proto"; service SyncService { - rpc SyncMedia(SyncAuth) returns (generic.Empty); - rpc AbortMediaSync(generic.Empty) returns (generic.Empty); - - rpc SyncLogin(SyncLoginRequest) returns (SyncAuth); - rpc SyncStatus(SyncAuth) returns (SyncStatusResponse); - rpc SyncCollection(SyncAuth) returns (SyncCollectionResponse); - rpc FullUpload(SyncAuth) returns (generic.Empty); - rpc FullDownload(SyncAuth) returns (generic.Empty); - rpc AbortSync(generic.Empty) returns (generic.Empty); + rpc SyncMedia(SyncAuth) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc AbortMediaSync(generic.Empty) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc SyncLogin(SyncLoginRequest) returns (SyncAuth) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc SyncStatus(SyncAuth) returns (SyncStatusResponse) { + option 
(codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc SyncCollection(SyncAuth) returns (SyncCollectionResponse) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc FullUpload(SyncAuth) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc FullDownload(SyncAuth) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } + rpc AbortSync(generic.Empty) returns (generic.Empty) { + option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY; + } } message SyncAuth { diff --git a/pylib/rsbridge/lib.rs b/pylib/rsbridge/lib.rs index 5655c330e..6f6ceffce 100644 --- a/pylib/rsbridge/lib.rs +++ b/pylib/rsbridge/lib.rs @@ -53,7 +53,7 @@ impl Backend { input: &PyBytes, ) -> PyResult { let in_bytes = input.as_bytes(); - py.allow_threads(|| self.backend.run_method(service, method, in_bytes)) + py.allow_threads(|| self.backend.run_service_method(service, method, in_bytes)) .map(|out_bytes| { let out_obj = PyBytes::new(py, &out_bytes); out_obj.into() diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml index 11be3592c..31f70cd23 100644 --- a/rslib/Cargo.toml +++ b/rslib/Cargo.toml @@ -19,17 +19,16 @@ name = "benchmark" harness = false required-features = ["bench"] -# After updating anything below, run ../cargo/update_licenses.sh - [build-dependencies] +anki_io = { version = "0.0.0", path = "io" } +anki_proto = { version = "0.0.0", path = "proto" } anyhow = "1.0.71" inflections = "1.1.1" +prettyplease = "0.2.7" prost = "0.11.8" -prost-build = "0.11.8" prost-reflect = "0.11.4" prost-types = "0.11.9" -regex = "1.7.3" -which = "4.4.0" +syn = { version = "2.0.18", features = ["parsing", "printing"] } [dev-dependencies] async-stream = "0.3.4" @@ -45,6 +44,7 @@ features = ["json", "socks", "stream", "multipart"] anki_i18n = { path = "i18n" } anki_io = { path = "io" } anki_proto = { path = "proto" } +workspace-hack = { version = "0.1", path = "../tools/workspace-hack" } csv = { git = 
"https://github.com/ankitects/rust-csv.git", rev = "1c9d3aab6f79a7d815c69f925a46a4590c115f90" } percent-encoding-iri = { git = "https://github.com/ankitects/rust-url.git", rev = "bb930b8d089f4d30d7d19c12e54e66191de47b88" } @@ -80,6 +80,7 @@ nom = "7.1.3" num_cpus = "1.15.0" num_enum = "0.6.1" once_cell = "1.17.1" +phf = "0.11.1" pin-project = "1.0.12" prost = "0.11.8" pulldown-cmark = "0.9.2" @@ -105,7 +106,6 @@ tracing-subscriber = { version = "0.3.16", features = ["fmt", "env-filter"] } unic-ucd-category = "0.9.0" unicode-normalization = "0.1.22" utime = "0.3.1" -workspace-hack = { version = "0.1", path = "../tools/workspace-hack" } zip = { version = "0.6.4", default-features = false, features = ["deflate", "time"] } zstd = { version = "0.12.3", features = ["zstdmt"] } diff --git a/rslib/build.rs b/rslib/build.rs index 508c85277..8a7d2362b 100644 --- a/rslib/build.rs +++ b/rslib/build.rs @@ -1,10 +1,23 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::fs; +mod rust_interface; -fn main() { +use std::env; +use std::fs; +use std::path::PathBuf; + +use anyhow::Result; +use prost_reflect::DescriptorPool; + +fn main() -> Result<()> { println!("cargo:rerun-if-changed=../out/buildhash"); let buildhash = fs::read_to_string("../out/buildhash").unwrap_or_default(); - println!("cargo:rustc-env=BUILDHASH={buildhash}") + println!("cargo:rustc-env=BUILDHASH={buildhash}"); + + let descriptors_path = env::var("DESCRIPTORS_BIN").ok().map(PathBuf::from).unwrap(); + println!("cargo:rerun-if-changed={}", descriptors_path.display()); + let pool = DescriptorPool::decode(std::fs::read(descriptors_path)?.as_ref())?; + rust_interface::write_rust_interface(&pool)?; + Ok(()) } diff --git a/rslib/proto/build.rs b/rslib/proto/build.rs index 5d88a1b15..cd339a03b 100644 --- a/rslib/proto/build.rs +++ b/rslib/proto/build.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; 
http://www.gnu.org/licenses/agpl.html pub mod python; -pub mod rust; +pub mod rust_protos; pub mod ts; pub mod utils; @@ -14,7 +14,7 @@ use anyhow::Result; fn main() -> Result<()> { let descriptors_path = env::var("DESCRIPTORS_BIN").ok().map(PathBuf::from); - let pool = rust::write_backend_proto_rs(descriptors_path)?; + let pool = rust_protos::write_rust_protos(descriptors_path)?; python::write_python_interface(&pool)?; ts::write_ts_interface(&pool)?; diff --git a/rslib/proto/rust.rs b/rslib/proto/rust.rs deleted file mode 100644 index 8c36172a0..000000000 --- a/rslib/proto/rust.rs +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; -use std::fmt::Write; -use std::path::Path; -use std::path::PathBuf; - -use anki_io::create_dir_all; -use anki_io::read_file; -use anki_io::write_file_if_changed; -use anyhow::Context; -use anyhow::Result; -use itertools::Itertools; -use prost_build::ServiceGenerator; -use prost_reflect::DescriptorPool; - -pub fn write_backend_proto_rs(descriptors_path: Option) -> Result { - set_protoc_path(); - let proto_dir = PathBuf::from("../../proto"); - let paths = gather_proto_paths(&proto_dir)?; - let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); - let tmp_descriptors = out_dir.join("descriptors.tmp"); - prost_build::Config::new() - .out_dir(&out_dir) - .file_descriptor_set_path(&tmp_descriptors) - .service_generator(RustCodeGenerator::boxed()) - .type_attribute( - "Deck.Filtered.SearchTerm.Order", - "#[derive(strum::EnumIter)]", - ) - .type_attribute( - "Deck.Normal.DayLimit", - "#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]", - ) - .type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]") - .type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]") - .type_attribute( - "Preferences.BackupLimits", - "#[derive(Copy, serde::Deserialize, serde::Serialize)]", - ) - 
.type_attribute( - "CsvMetadata.DupeResolution", - "#[derive(serde::Deserialize, serde::Serialize)]", - ) - .type_attribute( - "CsvMetadata.MatchScope", - "#[derive(serde::Deserialize, serde::Serialize)]", - ) - .compile_protos(paths.as_slice(), &[proto_dir]) - .context("prost build")?; - - let descriptors = read_file(&tmp_descriptors)?; - if let Some(descriptors_path) = descriptors_path { - create_dir_all( - descriptors_path - .parent() - .context("missing parent of descriptor")?, - )?; - write_file_if_changed(descriptors_path, &descriptors)?; - } - write_service_index(&out_dir, descriptors) -} - -fn write_service_index(out_dir: &Path, descriptors: Vec) -> Result { - let pool = - DescriptorPool::decode(descriptors.as_ref()).context("unable to decode descriptors")?; - let mut buf = String::new(); - - writeln!( - buf, - "#[derive(num_enum::TryFromPrimitive)] -#[repr(u32)] -pub enum ServiceIndex {{" - ) - .unwrap(); - for service in pool.services() { - writeln!( - buf, - " {} = {},", - service.name().replace("Service", ""), - service.index() - ) - .unwrap(); - } - writeln!(buf, "}}").unwrap(); - - write_file_if_changed(out_dir.join("service_index.rs"), buf)?; - - Ok(pool) -} - -fn gather_proto_paths(proto_dir: &Path) -> Result> { - let subfolders = &["anki"]; - let mut paths = vec![]; - for subfolder in subfolders { - for entry in proto_dir.join(subfolder).read_dir().unwrap() { - let entry = entry.unwrap(); - let path = entry.path(); - if path - .file_name() - .unwrap() - .to_str() - .unwrap() - .ends_with(".proto") - { - println!("cargo:rerun-if-changed={}", path.to_str().unwrap()); - paths.push(path); - } - } - } - paths.sort(); - Ok(paths) -} - -struct RustCodeGenerator {} - -impl RustCodeGenerator { - fn boxed() -> Box { - Box::new(Self {}) - } - - fn write_method_trait(&mut self, buf: &mut String, service: &prost_build::Service) { - buf.push_str( - r#" -pub trait Service { - type Error: From; - - fn run_method(&self, method: u32, input: &[u8]) -> Result, 
Self::Error> { - match method { -"#, - ); - for (idx, method) in service.methods.iter().enumerate() { - write!( - buf, - concat!(" ", - "{idx} => {{ let input = super::{input_type}::decode(input).map_err(crate::ProtoError::from)?;\n", - "let output = self.{rust_method}(input)?;\n", - "let mut out_bytes = Vec::new(); output.encode(&mut out_bytes).map_err(crate::ProtoError::from)?; Ok(out_bytes) }}, "), - idx = idx, - input_type = method.input_type, - rust_method = method.name - ) - .unwrap(); - } - buf.push_str( - r#" - _ => Err(crate::ProtoError::InvalidMethodIndex.into()), - } - } -"#, - ); - - for method in &service.methods { - let comments = method - .comments - .leading - .iter() - .map(|c| format!(" /// {c}")) - .join("\n"); - write!( - buf, - concat!( - "{comments}\n", - "fn {method_name}(&self, input: super::{input_type}) -> ", - "Result;\n" - ), - comments = comments, - method_name = method.name, - input_type = method.input_type, - output_type = method.output_type - ) - .unwrap(); - } - buf.push_str("}\n"); - } -} - -impl ServiceGenerator for RustCodeGenerator { - fn generate(&mut self, service: prost_build::Service, buf: &mut String) { - write!( - buf, - "pub mod {name}_service {{ - use prost::Message; - ", - name = service.name.replace("Service", "").to_ascii_lowercase() - ) - .unwrap(); - self.write_method_trait(buf, &service); - buf.push('}'); - } -} - -/// Set PROTOC to the custom path provided by PROTOC_BINARY, or add .exe to -/// the standard path if on Windows. 
-fn set_protoc_path() { - if let Ok(custom_protoc) = env::var("PROTOC_BINARY") { - env::set_var("PROTOC", custom_protoc); - } else if let Ok(bundled_protoc) = env::var("PROTOC") { - if cfg!(windows) && !bundled_protoc.ends_with(".exe") { - env::set_var("PROTOC", format!("{bundled_protoc}.exe")); - } - } -} diff --git a/rslib/proto/rust_protos.rs b/rslib/proto/rust_protos.rs new file mode 100644 index 000000000..6b08bca69 --- /dev/null +++ b/rslib/proto/rust_protos.rs @@ -0,0 +1,95 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::env; +use std::path::Path; +use std::path::PathBuf; + +use anki_io::create_dir_all; +use anki_io::read_file; +use anki_io::write_file_if_changed; +use anyhow::Context; +use anyhow::Result; +use prost_reflect::DescriptorPool; + +pub fn write_rust_protos(descriptors_path: Option) -> Result { + set_protoc_path(); + let proto_dir = PathBuf::from("../../proto"); + let paths = gather_proto_paths(&proto_dir)?; + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + let tmp_descriptors = out_dir.join("descriptors.tmp"); + prost_build::Config::new() + .out_dir(&out_dir) + .file_descriptor_set_path(&tmp_descriptors) + .type_attribute( + "Deck.Filtered.SearchTerm.Order", + "#[derive(strum::EnumIter)]", + ) + .type_attribute( + "Deck.Normal.DayLimit", + "#[derive(Copy, Eq, serde::Deserialize, serde::Serialize)]", + ) + .type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]") + .type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]") + .type_attribute( + "Preferences.BackupLimits", + "#[derive(Copy, serde::Deserialize, serde::Serialize)]", + ) + .type_attribute( + "CsvMetadata.DupeResolution", + "#[derive(serde::Deserialize, serde::Serialize)]", + ) + .type_attribute( + "CsvMetadata.MatchScope", + "#[derive(serde::Deserialize, serde::Serialize)]", + ) + .compile_protos(paths.as_slice(), &[proto_dir]) + 
.context("prost build")?; + + let descriptors = read_file(&tmp_descriptors)?; + if let Some(descriptors_path) = descriptors_path { + create_dir_all( + descriptors_path + .parent() + .context("missing parent of descriptor")?, + )?; + write_file_if_changed(descriptors_path, &descriptors)?; + } + let pool = DescriptorPool::decode(descriptors.as_ref())?; + Ok(pool) +} + +fn gather_proto_paths(proto_dir: &Path) -> Result> { + let subfolders = &["anki"]; + let mut paths = vec![]; + for subfolder in subfolders { + for entry in proto_dir.join(subfolder).read_dir().unwrap() { + let entry = entry.unwrap(); + let path = entry.path(); + if path + .file_name() + .unwrap() + .to_str() + .unwrap() + .ends_with(".proto") + { + println!("cargo:rerun-if-changed={}", path.to_str().unwrap()); + paths.push(path); + } + } + } + paths.sort(); + Ok(paths) +} + +/// Set PROTOC to the custom path provided by PROTOC_BINARY, or add .exe to +/// the standard path if on Windows. +fn set_protoc_path() { + if let Ok(custom_protoc) = env::var("PROTOC_BINARY") { + env::set_var("PROTOC", custom_protoc); + } else if let Ok(bundled_protoc) = env::var("PROTOC") { + if cfg!(windows) && !bundled_protoc.ends_with(".exe") { + env::set_var("PROTOC", format!("{bundled_protoc}.exe")); + } + } +} diff --git a/rslib/proto/src/generic_helpers.rs b/rslib/proto/src/generic_helpers.rs index ad1955b63..0784a83d9 100644 --- a/rslib/proto/src/generic_helpers.rs +++ b/rslib/proto/src/generic_helpers.rs @@ -48,9 +48,3 @@ impl From for crate::generic::UInt32 { crate::generic::UInt32 { val: val as u32 } } } - -impl From<()> for crate::generic::Empty { - fn from(_val: ()) -> Self { - crate::generic::Empty {} - } -} diff --git a/rslib/proto/src/lib.rs b/rslib/proto/src/lib.rs index ac2bc8d60..ec3c47053 100644 --- a/rslib/proto/src/lib.rs +++ b/rslib/proto/src/lib.rs @@ -11,27 +11,11 @@ macro_rules! 
protobuf { }; } -use snafu::Snafu; - -#[derive(Debug, Snafu)] -pub enum ProtoError { - InvalidMethodIndex, - #[snafu(context(false))] - DecodeError { - source: prost::DecodeError, - }, - #[snafu(context(false))] - EncodeError { - source: prost::EncodeError, - }, -} - -include!(concat!(env!("OUT_DIR"), "/service_index.rs")); - protobuf!(ankidroid, "ankidroid"); protobuf!(backend, "backend"); protobuf!(card_rendering, "card_rendering"); protobuf!(cards, "cards"); +protobuf!(codegen, "codegen"); protobuf!(collection, "collection"); protobuf!(config, "config"); protobuf!(deckconfig, "deckconfig"); diff --git a/rslib/rust_interface.rs b/rslib/rust_interface.rs new file mode 100644 index 000000000..4c4988af6 --- /dev/null +++ b/rslib/rust_interface.rs @@ -0,0 +1,319 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::env; +use std::fmt::Write; +use std::path::PathBuf; + +use anki_io::write_file_if_changed; +use anki_proto::codegen::RustMethods; +use anyhow::Context; +use anyhow::Result; +use inflections::Inflect; +use prost_reflect::DescriptorPool; + +pub fn write_rust_interface(pool: &DescriptorPool) -> Result<()> { + let mut buf = String::new(); + buf.push_str("use crate::error::Result; use prost::Message;"); + let services = pool + .services() + .map(RustService::from_proto) + .collect::>(); + for service in &services { + render_service(service, &mut buf); + } + + render_top_level_run_method(&mut buf, &services, true); + render_top_level_run_method(&mut buf, &services, false); + render_method_lookup(&mut buf, &services); + + // println!("{}", &buf); + let buf = format_code(buf)?; + // write into OUT_DIR so we can use it in build.rs + let out_dir = env::var("OUT_DIR").unwrap(); + let path = PathBuf::from(out_dir).join("backend.rs"); + write_file_if_changed(path, buf).context("write file")?; + Ok(()) +} + +#[derive(Debug)] +struct RustService { + name: String, + methods: Vec, 
+} + +#[derive(Debug)] +struct RustMethod { + name: String, + input_type: Option, + output_type: Option, + options: anki_proto::codegen::MethodOptions, +} + +impl RustMethod { + /// No text if generic::Empty + fn text_if_input_not_empty(&self, text: impl Fn(&String) -> String) -> String { + self.input_type.as_ref().map(text).unwrap_or_default() + } + + /// No text if generic::Empty + fn get_input_arg_with_label(&self) -> String { + self.input_type + .as_ref() + .map(|t| format!("input: {}", t)) + .unwrap_or_default() + } + + /// () if generic::Empty + fn get_output_type(&self) -> String { + self.output_type.as_deref().unwrap_or("()").into() + } + + fn text_if_output_not_empty(&self, text: impl Fn(&String) -> String) -> String { + self.output_type.as_ref().map(text).unwrap_or_default() + } + + fn wants_abstract_backend_method(&self) -> bool { + self.options.rust_methods() != RustMethods::CollectionAndAutoBackend + } + + fn wants_abstract_collection_method(&self) -> bool { + self.options.rust_methods() != RustMethods::BackendOnly + } +} + +impl RustMethod { + fn from_proto(method: prost_reflect::MethodDescriptor) -> Self { + RustMethod { + name: method.name().to_snake_case(), + input_type: rust_type(method.input().full_name()), + output_type: rust_type(method.output().full_name()), + options: method.options().transcode_to().unwrap(), + } + } +} + +impl RustService { + fn from_proto(service: prost_reflect::ServiceDescriptor) -> Self { + RustService { + name: service.name().to_string(), + methods: service.methods().map(RustMethod::from_proto).collect(), + } + } +} + +fn rust_type(name: &str) -> Option { + if name == "anki.generic.Empty" { + return None; + } + let Some((head, tail)) = name.rsplit_once( '.') else { panic!() }; + Some(format!( + "{}::{}", + head.to_snake_case() + .replace('.', "::") + .replace("anki::", "anki_proto::"), + tail + )) +} + +fn format_code(code: String) -> Result { + let syntax_tree = syn::parse_file(&code)?; + 
Ok(prettyplease::unparse(&syntax_tree)) +} + +fn render_abstract_collection_method(method: &RustMethod, buf: &mut String) { + let method_name = &method.name; + let input_with_label = method.get_input_arg_with_label(); + let output_type = method.get_output_type(); + writeln!( + buf, + "fn {method_name}(&mut self, {input_with_label}) -> Result<{output_type}>;" + ) + .unwrap(); +} + +fn render_abstract_backend_method(method: &RustMethod, buf: &mut String, _service: &RustService) { + let method_name = &method.name; + let input_with_label = method.get_input_arg_with_label(); + let output_type = method.get_output_type(); + writeln!( + buf, + "fn {method_name}(&self, {input_with_label}) -> Result<{output_type}>;" + ) + .unwrap(); +} + +fn render_delegating_backend_method(method: &RustMethod, buf: &mut String, service: &RustService) { + let method_name = &method.name; + let input_with_label = method.get_input_arg_with_label(); + let input = method.text_if_input_not_empty(|_| "input".into()); + let output_type = method.get_output_type(); + let col_service = &service.name; + writeln!( + buf, + "fn {method_name}(&self, {input_with_label}) -> Result<{output_type}> {{ + self.with_col(|col| {col_service}::{method_name}(col, {input})) }}", + ) + .unwrap(); +} + +fn render_service(service: &RustService, buf: &mut String) { + let have_collection = service + .methods + .iter() + .any(|m| m.wants_abstract_collection_method()); + if have_collection { + render_collection_trait(service, buf); + } + if service + .methods + .iter() + .any(|m| m.wants_abstract_backend_method()) + { + render_backend_trait(service, buf); + } + render_delegating_backend_methods(service, buf); + render_individual_service_run_method(buf, service, true); + render_individual_service_run_method(buf, service, false); +} + +fn render_collection_trait(service: &RustService, buf: &mut String) { + let name = &service.name; + writeln!(buf, "pub trait {name} {{").unwrap(); + for method in &service.methods { + if 
 method.wants_abstract_collection_method() { + render_abstract_collection_method(method, buf); + } + } + buf.push('}'); +} + +fn render_backend_trait(service: &RustService, buf: &mut String) { + let name = format!("Backend{}", service.name); + writeln!(buf, "pub trait {name} {{").unwrap(); + for method in &service.methods { + if method.wants_abstract_backend_method() { + render_abstract_backend_method(method, buf, service); + } + } + buf.push('}'); +} + +fn render_delegating_backend_methods(service: &RustService, buf: &mut String) { + buf.push_str("impl crate::backend::Backend {"); + for method in &service.methods { + if method.wants_abstract_backend_method() { + continue; + } + render_delegating_backend_method(method, buf, service); + } + buf.push('}'); +} + +// Matches all service types and delegates to the relevant self.run_foo_method() +fn render_top_level_run_method(buf: &mut String, services: &[RustService], backend: bool) { + let self_kind = if backend { "&self" } else { "&mut self" }; + let struct_to_impl = if backend { + "crate::backend::Backend" + } else { + "crate::collection::Collection" + }; + writeln!(buf, + r#" impl {struct_to_impl} {{ + pub fn run_service_method({self_kind}, service: u32, method: u32, input: &[u8]) -> Result, Vec> {{ + match service {{ +"#, + ).unwrap(); + for (idx, service) in services.iter().enumerate() { + writeln!( + buf, + "{idx} => self.run_{service}_method(method, input),", + service = service.name.to_snake_case() + ) + .unwrap(); + } + buf.push_str( + r#" + _ => Err(crate::error::AnkiError::InvalidServiceIndex), + } + .map_err(|err| { + let backend_err = err.into_protobuf(&self.tr); + let mut bytes = Vec::new(); + backend_err.encode(&mut bytes).unwrap(); + bytes + }) + } }"#, + ); +} + +fn render_individual_service_run_method(buf: &mut String, service: &RustService, backend: bool) { + let self_kind = if backend { "&self" } else { "&mut self" }; + let struct_to_impl = if backend { + "crate::backend::Backend" + } else { + 
"crate::collection::Collection" + }; + let method_qualifier = if backend { + struct_to_impl + } else { + &service.name + }; + + let service_name = &service.name.to_snake_case(); + writeln!( + buf, + "#[allow(unused_variables, clippy::match_single_binding)] + impl {struct_to_impl} {{ pub(crate) fn run_{service_name}_method({self_kind}, + method: u32, input: &[u8]) -> Result> {{ + match method {{", + ) + .unwrap(); + for (idx, method) in service.methods.iter().enumerate() { + if !backend && !method.wants_abstract_collection_method() { + continue; + } + let decode_input = + method.text_if_input_not_empty(|kind| format!("let input = {kind}::decode(input)?;")); + let rust_method = &method.name; + let input = method.text_if_input_not_empty(|_| "input".into()); + let output_assign = method.text_if_output_not_empty(|_| "let output = ".into()); + let output = if method.output_type.is_none() { + "Vec::new()" + } else { + "{ let mut out_bytes = Vec::new(); + output.encode(&mut out_bytes)?; + out_bytes }" + }; + writeln!( + buf, + "{idx} => {{ {decode_input} + {output_assign} {method_qualifier}::{rust_method}(self, {input})?; + Ok({output}) }},", + ) + .unwrap(); + } + buf.push_str( + r#" + _ => Err(crate::error::AnkiError::InvalidMethodIndex), + } +} } +"#, + ); +} + +fn render_method_lookup(buf: &mut String, services: &[RustService]) { + writeln!( + buf, + " +pub const METHODS_BY_NAME: phf::Map<&str, (u32, u32)> = phf::phf_map! 
{{ +" + ) + .unwrap(); + for (sidx, service) in services.iter().enumerate() { + for (midx, method) in service.methods.iter().enumerate() { + let name = &method.name; + writeln!(buf, r#" "{name}" => ({sidx}, {midx}),"#,).unwrap(); + } + } + buf.push_str("};\n"); +} diff --git a/rslib/src/backend/ankidroid/db.rs b/rslib/src/ankidroid/db.rs similarity index 99% rename from rslib/src/backend/ankidroid/db.rs rename to rslib/src/ankidroid/db.rs index 67e4f4737..32b069ef8 100644 --- a/rslib/src/backend/ankidroid/db.rs +++ b/rslib/src/ankidroid/db.rs @@ -284,8 +284,8 @@ mod tests { use anki_proto::ankidroid::SqlValue; use super::*; - use crate::backend::ankidroid::db::select_slice_of_size; - use crate::backend::ankidroid::db::Sizable; + use crate::ankidroid::db::select_slice_of_size; + use crate::ankidroid::db::Sizable; fn gen_data() -> Vec { vec![ diff --git a/rslib/src/backend/ankidroid/error.rs b/rslib/src/ankidroid/error.rs similarity index 98% rename from rslib/src/backend/ankidroid/error.rs rename to rslib/src/ankidroid/error.rs index 4f92faf97..2e8dd162a 100644 --- a/rslib/src/backend/ankidroid/error.rs +++ b/rslib/src/ankidroid/error.rs @@ -13,7 +13,7 @@ use crate::error::SyncError; use crate::error::SyncErrorKind as Sync; use crate::prelude::AnkiError; -pub(super) fn debug_produce_error(s: &str) -> AnkiError { +pub(crate) fn debug_produce_error(s: &str) -> AnkiError { let info = "error_value".to_string(); match s { "TemplateError" => AnkiError::TemplateError { info }, diff --git a/rslib/src/ankidroid/mod.rs b/rslib/src/ankidroid/mod.rs new file mode 100644 index 000000000..1d76ab6ac --- /dev/null +++ b/rslib/src/ankidroid/mod.rs @@ -0,0 +1,5 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(crate) mod db; +pub(crate) mod error; +pub mod service; diff --git a/rslib/src/ankidroid/service.rs b/rslib/src/ankidroid/service.rs new file mode 100644 index 000000000..e44146204 --- 
/dev/null +++ b/rslib/src/ankidroid/service.rs @@ -0,0 +1,70 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::ankidroid::DbResponse; +use anki_proto::ankidroid::GetActiveSequenceNumbersResponse; +use anki_proto::ankidroid::GetNextResultPageRequest; +use anki_proto::generic; + +use crate::ankidroid::db; +use crate::ankidroid::db::active_sequences; +use crate::ankidroid::db::execute_for_row_count; +use crate::ankidroid::db::insert_for_id; +use crate::backend::dbproxy::db_command_bytes; +use crate::backend::dbproxy::db_command_proto; +use crate::collection::Collection; +use crate::error; +use crate::error::OrInvalid; + +impl crate::services::AnkidroidService for Collection { + fn run_db_command(&mut self, input: generic::Json) -> error::Result { + db_command_bytes(self, &input.json).map(|json| generic::Json { json }) + } + + fn run_db_command_proto(&mut self, input: generic::Json) -> error::Result { + db_command_proto(self, &input.json) + } + + fn run_db_command_for_row_count( + &mut self, + input: generic::Json, + ) -> error::Result { + execute_for_row_count(self, &input.json).map(|val| generic::Int64 { val }) + } + + fn flush_all_queries(&mut self) -> error::Result<()> { + db::flush_collection(self); + Ok(()) + } + + fn flush_query(&mut self, input: generic::Int32) -> error::Result<()> { + db::flush_single_result(self, input.val); + Ok(()) + } + + fn get_next_result_page( + &mut self, + input: GetNextResultPageRequest, + ) -> error::Result { + db::get_next(self, input.sequence, input.index).or_invalid("missing result page") + } + + fn insert_for_id(&mut self, input: generic::Json) -> error::Result { + insert_for_id(self, &input.json).map(Into::into) + } + + fn get_column_names_from_query( + &mut self, + input: generic::String, + ) -> error::Result { + let stmt = self.storage.db.prepare(&input.val)?; + let names = stmt.column_names(); + let names: Vec<_> = 
names.iter().map(ToString::to_string).collect(); + Ok(names.into()) + } + + fn get_active_sequence_numbers(&mut self) -> error::Result { + Ok(GetActiveSequenceNumbersResponse { + numbers: active_sequences(self), + }) + } +} diff --git a/rslib/src/backend/ankidroid.rs b/rslib/src/backend/ankidroid.rs new file mode 100644 index 000000000..f33423317 --- /dev/null +++ b/rslib/src/backend/ankidroid.rs @@ -0,0 +1,61 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use anki_proto::generic; + +use super::Backend; +use crate::ankidroid::db; +use crate::ankidroid::error::debug_produce_error; +use crate::prelude::*; +use crate::scheduler::timing; +use crate::scheduler::timing::fixed_offset_from_minutes; +use crate::services::BackendAnkidroidService; + +impl BackendAnkidroidService for Backend { + fn sched_timing_today_legacy( + &self, + input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest, + ) -> Result { + let result = timing::sched_timing_today( + TimestampSecs::from(input.created_secs), + TimestampSecs::from(input.now_secs), + input.created_mins_west.map(fixed_offset_from_minutes), + fixed_offset_from_minutes(input.now_mins_west), + Some(input.rollover_hour as u8), + )?; + Ok(anki_proto::scheduler::SchedTimingTodayResponse::from( + result, + )) + } + + fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result { + Ok(generic::Int32 { + val: timing::local_minutes_west_for_stamp(input.val.into())?, + }) + } + + fn set_page_size(&self, input: generic::Int64) -> Result<()> { + // we don't require an open collection, but should avoid modifying this + // concurrently + let _guard = self.col.lock(); + db::set_max_page_size(input.val as usize); + Ok(()) + } + + fn debug_produce_error(&self, input: generic::String) -> Result<()> { + Err(debug_produce_error(&input.val)) + } +} + +impl From + for anki_proto::scheduler::SchedTimingTodayResponse +{ + fn from( + t: 
crate::scheduler::timing::SchedTimingToday, + ) -> anki_proto::scheduler::SchedTimingTodayResponse { + anki_proto::scheduler::SchedTimingTodayResponse { + days_elapsed: t.days_elapsed, + next_day_at: t.next_day_at.0, + } + } +} diff --git a/rslib/src/backend/ankidroid/mod.rs b/rslib/src/backend/ankidroid/mod.rs deleted file mode 100644 index 1a4df3bfe..000000000 --- a/rslib/src/backend/ankidroid/mod.rs +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub(crate) mod db; -pub(crate) mod error; - -pub(super) use anki_proto::ankidroid::ankidroid_service::Service as AnkidroidService; -use anki_proto::ankidroid::DbResponse; -use anki_proto::ankidroid::GetActiveSequenceNumbersResponse; -use anki_proto::ankidroid::GetNextResultPageRequest; -use anki_proto::generic; - -use self::db::active_sequences; -use self::error::debug_produce_error; -use super::dbproxy::db_command_bytes; -use super::dbproxy::db_command_proto; -use super::Backend; -use crate::backend::ankidroid::db::execute_for_row_count; -use crate::backend::ankidroid::db::insert_for_id; -use crate::prelude::*; -use crate::scheduler::timing; -use crate::scheduler::timing::fixed_offset_from_minutes; - -impl AnkidroidService for Backend { - type Error = AnkiError; - - fn sched_timing_today_legacy( - &self, - input: anki_proto::ankidroid::SchedTimingTodayLegacyRequest, - ) -> Result { - let result = timing::sched_timing_today( - TimestampSecs::from(input.created_secs), - TimestampSecs::from(input.now_secs), - input.created_mins_west.map(fixed_offset_from_minutes), - fixed_offset_from_minutes(input.now_mins_west), - Some(input.rollover_hour as u8), - )?; - Ok(anki_proto::scheduler::SchedTimingTodayResponse::from( - result, - )) - } - - fn local_minutes_west_legacy(&self, input: generic::Int64) -> Result { - Ok(generic::Int32 { - val: timing::local_minutes_west_for_stamp(input.val.into())?, - }) - } - - fn 
run_db_command(&self, input: generic::Json) -> Result { - self.with_col(|col| db_command_bytes(col, &input.json)) - .map(|json| generic::Json { json }) - } - - fn run_db_command_proto(&self, input: generic::Json) -> Result { - self.with_col(|col| db_command_proto(col, &input.json)) - } - - fn run_db_command_for_row_count(&self, input: generic::Json) -> Result { - self.with_col(|col| execute_for_row_count(col, &input.json)) - .map(|val| generic::Int64 { val }) - } - - fn flush_all_queries(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - db::flush_collection(col); - Ok(generic::Empty {}) - }) - } - - fn flush_query(&self, input: generic::Int32) -> Result { - self.with_col(|col| { - db::flush_single_result(col, input.val); - Ok(generic::Empty {}) - }) - } - - fn get_next_result_page(&self, input: GetNextResultPageRequest) -> Result { - self.with_col(|col| { - db::get_next(col, input.sequence, input.index).or_invalid("missing result page") - }) - } - - fn insert_for_id(&self, input: generic::Json) -> Result { - self.with_col(|col| insert_for_id(col, &input.json).map(Into::into)) - } - - fn set_page_size(&self, input: generic::Int64) -> Result { - // we don't require an open collection, but should avoid modifying this - // concurrently - let _guard = self.col.lock(); - db::set_max_page_size(input.val as usize); - Ok(().into()) - } - - fn get_column_names_from_query(&self, input: generic::String) -> Result { - self.with_col(|col| { - let stmt = col.storage.db.prepare(&input.val)?; - let names = stmt.column_names(); - let names: Vec<_> = names.iter().map(ToString::to_string).collect(); - Ok(names.into()) - }) - } - - fn get_active_sequence_numbers( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - Ok(GetActiveSequenceNumbersResponse { - numbers: active_sequences(col), - }) - }) - } - - fn debug_produce_error(&self, input: generic::String) -> Result { - Err(debug_produce_error(&input.val)) - } -} - -impl From - for 
anki_proto::scheduler::SchedTimingTodayResponse -{ - fn from( - t: crate::scheduler::timing::SchedTimingToday, - ) -> anki_proto::scheduler::SchedTimingTodayResponse { - anki_proto::scheduler::SchedTimingTodayResponse { - days_elapsed: t.days_elapsed, - next_day_at: t.next_day_at.0, - } - } -} diff --git a/rslib/src/backend/card_rendering.rs b/rslib/src/backend/card_rendering.rs new file mode 100644 index 000000000..7c4520948 --- /dev/null +++ b/rslib/src/backend/card_rendering.rs @@ -0,0 +1,16 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::card_rendering::StripHtmlRequest; + +use crate::backend::Backend; +use crate::card_rendering::service::strip_html_proto; +use crate::services::BackendCardRenderingService; + +impl BackendCardRenderingService for Backend { + fn strip_html( + &self, + input: StripHtmlRequest, + ) -> crate::error::Result { + strip_html_proto(input) + } +} diff --git a/rslib/src/backend/collection.rs b/rslib/src/backend/collection.rs index 4cdc2a242..bce92bba3 100644 --- a/rslib/src/backend/collection.rs +++ b/rslib/src/backend/collection.rs @@ -3,7 +3,6 @@ use std::sync::MutexGuard; -pub(super) use anki_proto::collection::collection_service::Service as CollectionService; use anki_proto::generic; use tracing::error; @@ -11,25 +10,11 @@ use super::Backend; use crate::collection::CollectionBuilder; use crate::prelude::*; use crate::progress::progress_to_proto; +use crate::services::BackendCollectionService; use crate::storage::SchemaVersion; -impl CollectionService for Backend { - type Error = AnkiError; - - fn latest_progress(&self, _input: generic::Empty) -> Result { - let progress = self.progress_state.lock().unwrap().last_progress; - Ok(progress_to_proto(progress, &self.tr)) - } - - fn set_wants_abort(&self, _input: generic::Empty) -> Result { - self.progress_state.lock().unwrap().want_abort = true; - Ok(().into()) - } - - fn open_collection( - 
&self, - input: anki_proto::collection::OpenCollectionRequest, - ) -> Result { +impl BackendCollectionService for Backend { + fn open_collection(&self, input: anki_proto::collection::OpenCollectionRequest) -> Result<()> { let mut guard = self.lock_closed_collection()?; let mut builder = CollectionBuilder::new(input.collection_path); @@ -42,13 +27,13 @@ impl CollectionService for Backend { *guard = Some(builder.build()?); - Ok(().into()) + Ok(()) } fn close_collection( &self, input: anki_proto::collection::CloseCollectionRequest, - ) -> Result { + ) -> Result<()> { let desired_version = if input.downgrade_to_schema11 { Some(SchemaVersion::V11) } else { @@ -63,47 +48,7 @@ impl CollectionService for Backend { error!(" failed: {:?}", e); } - Ok(().into()) - } - - fn check_database( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - col.check_database() - .map(|problems| anki_proto::collection::CheckDatabaseResponse { - problems: problems.to_i18n_strings(&col.tr), - }) - }) - } - - fn get_undo_status( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr))) - } - - fn undo(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr))) - } - - fn redo(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr))) - } - - fn add_custom_undo_entry(&self, input: generic::String) -> Result { - self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into())) - } - - fn merge_undo_entries( - &self, - input: generic::UInt32, - ) -> Result { - let starting_from = input.val as usize; - self.with_col(|col| col.merge_undoable_ops(starting_from)) - .map(Into::into) + Ok(()) } fn create_backup( @@ -133,9 +78,54 @@ impl CollectionService for Backend { Ok(created.into()) } - fn await_backup_completion(&self, _input: generic::Empty) -> Result { + fn await_backup_completion(&self) -> Result<()> { 
self.await_backup_completion()?; - Ok(().into()) + Ok(()) + } +} + +impl crate::services::CollectionService for Collection { + fn check_database(&mut self) -> Result { + { + self.check_database() + .map(|problems| anki_proto::collection::CheckDatabaseResponse { + problems: problems.to_i18n_strings(&self.tr), + }) + } + } + + fn get_undo_status(&mut self) -> Result { + Ok(self.undo_status().into_protobuf(&self.tr)) + } + + fn undo(&mut self) -> Result { + self.undo().map(|out| out.into_protobuf(&self.tr)) + } + + fn redo(&mut self) -> Result { + self.redo().map(|out| out.into_protobuf(&self.tr)) + } + + fn add_custom_undo_entry(&mut self, input: generic::String) -> Result { + Ok(self.add_custom_undo_step(input.val).into()) + } + + fn merge_undo_entries( + &mut self, + input: generic::UInt32, + ) -> Result { + let starting_from = input.val as usize; + self.merge_undoable_ops(starting_from).map(Into::into) + } + + fn latest_progress(&mut self) -> Result { + let progress = self.state.progress.lock().unwrap().last_progress; + Ok(progress_to_proto(progress, &self.tr)) + } + + fn set_wants_abort(&mut self) -> Result<()> { + self.state.progress.lock().unwrap().want_abort = true; + Ok(()) } } diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index b8e611e71..7637d30d1 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -3,11 +3,9 @@ use anki_proto::config::config_key::Bool as BoolKeyProto; use anki_proto::config::config_key::String as StringKeyProto; -pub(super) use anki_proto::config::config_service::Service as ConfigService; use anki_proto::generic; use serde_json::Value; -use super::Backend; use crate::config::BoolKey; use crate::config::StringKey; use crate::prelude::*; @@ -53,100 +51,88 @@ impl From for StringKey { } } -impl ConfigService for Backend { - type Error = AnkiError; - - fn get_config_json(&self, input: generic::String) -> Result { - self.with_col(|col| { - let val: Option = 
col.get_config_optional(input.val.as_str()); - val.or_not_found(input.val) - .and_then(|v| serde_json::to_vec(&v).map_err(Into::into)) - .map(Into::into) - }) - } - - fn set_config_json( - &self, - input: anki_proto::config::SetConfigJsonRequest, - ) -> Result { - self.with_col(|col| { - let val: Value = serde_json::from_slice(&input.value_json)?; - col.set_config_json(input.key.as_str(), &val, input.undoable) - }) - .map(Into::into) - } - - fn set_config_json_no_undo( - &self, - input: anki_proto::config::SetConfigJsonRequest, - ) -> Result { - self.with_col(|col| { - let val: Value = serde_json::from_slice(&input.value_json)?; - col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ())) - }) - .map(Into::into) - } - - fn remove_config(&self, input: generic::String) -> Result { - self.with_col(|col| col.remove_config(input.val.as_str())) +impl crate::services::ConfigService for Collection { + fn get_config_json(&mut self, input: generic::String) -> Result { + let val: Option = self.get_config_optional(input.val.as_str()); + val.or_not_found(input.val) + .and_then(|v| serde_json::to_vec(&v).map_err(Into::into)) .map(Into::into) } - fn get_all_config(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - let conf = col.storage.get_all_config()?; - serde_json::to_vec(&conf).map_err(Into::into) - }) - .map(Into::into) + fn set_config_json( + &mut self, + input: anki_proto::config::SetConfigJsonRequest, + ) -> Result { + let val: Value = serde_json::from_slice(&input.value_json)?; + self.set_config_json(input.key.as_str(), &val, input.undoable) + .map(Into::into) + } + + fn set_config_json_no_undo( + &mut self, + input: anki_proto::config::SetConfigJsonRequest, + ) -> Result<()> { + let val: Value = serde_json::from_slice(&input.value_json)?; + self.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ())) + .map(Into::into) + } + + fn remove_config( + &mut self, + input: generic::String, + ) -> Result { + 
self.remove_config(input.val.as_str()).map(Into::into) + } + + fn get_all_config(&mut self) -> Result { + let conf = self.storage.get_all_config()?; + serde_json::to_vec(&conf) + .map_err(Into::into) + .map(Into::into) } fn get_config_bool( - &self, + &mut self, input: anki_proto::config::GetConfigBoolRequest, ) -> Result { - self.with_col(|col| { - Ok(generic::Bool { - val: col.get_config_bool(input.key().into()), - }) + Ok(generic::Bool { + val: Collection::get_config_bool(self, input.key().into()), }) } fn set_config_bool( - &self, + &mut self, input: anki_proto::config::SetConfigBoolRequest, ) -> Result { - self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable)) + self.set_config_bool(input.key().into(), input.value, input.undoable) .map(Into::into) } fn get_config_string( - &self, + &mut self, input: anki_proto::config::GetConfigStringRequest, ) -> Result { - self.with_col(|col| { - Ok(generic::String { - val: col.get_config_string(input.key().into()), - }) + Ok(generic::String { + val: Collection::get_config_string(self, input.key().into()), }) } fn set_config_string( - &self, + &mut self, input: anki_proto::config::SetConfigStringRequest, ) -> Result { - self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable)) + self.set_config_string(input.key().into(), &input.value, input.undoable) .map(Into::into) } - fn get_preferences(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.get_preferences()) + fn get_preferences(&mut self) -> Result { + Collection::get_preferences(self) } fn set_preferences( - &self, + &mut self, input: anki_proto::config::Preferences, ) -> Result { - self.with_col(|col| col.set_preferences(input)) - .map(Into::into) + self.set_preferences(input).map(Into::into) } } diff --git a/rslib/src/backend/dbproxy.rs b/rslib/src/backend/dbproxy.rs index e9283672b..ecaadbdad 100644 --- a/rslib/src/backend/dbproxy.rs +++ b/rslib/src/backend/dbproxy.rs @@ -15,6 +15,8 
@@ use rusqlite::OptionalExtension; use serde::Deserialize; use serde::Serialize; +use crate::ankidroid::db::next_sequence_number; +use crate::ankidroid::db::trim_and_cache_remaining; use crate::prelude::*; use crate::storage::SqliteStorage; @@ -44,7 +46,7 @@ pub(super) enum DbResult { #[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] -pub(super) enum SqlValue { +pub(crate) enum SqlValue { Null, String(String), Int(i64), @@ -113,7 +115,7 @@ impl FromSql for SqlValue { } } -pub(super) fn db_command_bytes(col: &mut Collection, input: &[u8]) -> Result> { +pub(crate) fn db_command_bytes(col: &mut Collection, input: &[u8]) -> Result> { serde_json::to_vec(&db_command_bytes_inner(col, input)?).map_err(Into::into) } @@ -181,11 +183,7 @@ pub(crate) fn db_command_proto(col: &mut Collection, input: &[u8]) -> Result ProtoDbResult { rows: Vec::new() }, DbResult::Rows(rows) => rows_to_proto(&rows), }; - let trimmed = super::ankidroid::db::trim_and_cache_remaining( - col, - proto_resp, - super::ankidroid::db::next_sequence_number(), - ); + let trimmed = trim_and_cache_remaining(col, proto_resp, next_sequence_number()); Ok(trimmed) } diff --git a/rslib/src/backend/decks.rs b/rslib/src/backend/decks.rs deleted file mode 100644 index 08b75c9d1..000000000 --- a/rslib/src/backend/decks.rs +++ /dev/null @@ -1,334 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::convert::TryFrom; - -pub(super) use anki_proto::decks::decks_service::Service as DecksService; -use anki_proto::generic; - -use super::Backend; -use crate::decks::filtered::search_order_labels; -use crate::decks::DeckSchema11; -use crate::prelude::*; -use crate::scheduler::filtered::FilteredDeckForUpdate; - -impl DecksService for Backend { - type Error = AnkiError; - - fn new_deck(&self, _input: generic::Empty) -> Result { - Ok(Deck::new_normal().into()) - } - - fn add_deck( - &self, - deck: anki_proto::decks::Deck, - 
) -> Result { - let mut deck: Deck = deck.try_into()?; - self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into())) - } - - fn add_deck_legacy( - &self, - input: generic::Json, - ) -> Result { - let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?; - let mut deck: Deck = schema11.into(); - self.with_col(|col| { - let output = col.add_deck(&mut deck)?; - Ok(output.map(|_| deck.id.0).into()) - }) - } - - fn add_or_update_deck_legacy( - &self, - input: anki_proto::decks::AddOrUpdateDeckLegacyRequest, - ) -> Result { - self.with_col(|col| { - let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?; - let mut deck: Deck = schema11.into(); - if input.preserve_usn_and_mtime { - col.transact_no_undo(|col| { - let usn = col.usn()?; - col.add_or_update_single_deck_with_existing_id(&mut deck, usn) - })?; - } else { - col.add_or_update_deck(&mut deck)?; - } - Ok(anki_proto::decks::DeckId { did: deck.id.0 }) - }) - } - - fn deck_tree( - &self, - input: anki_proto::decks::DeckTreeRequest, - ) -> Result { - self.with_col(|col| { - let now = if input.now == 0 { - None - } else { - Some(TimestampSecs(input.now)) - }; - col.deck_tree(now) - }) - } - - fn deck_tree_legacy(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - let tree = col.legacy_deck_tree()?; - serde_json::to_vec(&tree) - .map_err(Into::into) - .map(Into::into) - }) - } - - fn get_all_decks_legacy(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - let decks = col.storage.get_all_decks_as_schema11()?; - serde_json::to_vec(&decks).map_err(Into::into) - }) - .map(Into::into) - } - - fn get_deck_id_by_name(&self, input: generic::String) -> Result { - self.with_col(|col| { - col.get_deck_id(&input.val).and_then(|d| { - d.or_not_found(input.val) - .map(|d| anki_proto::decks::DeckId { did: d.0 }) - }) - }) - } - - fn get_deck(&self, input: anki_proto::decks::DeckId) -> Result { - let did = input.into(); - self.with_col(|col| 
Ok(col.storage.get_deck(did)?.or_not_found(did)?.into())) - } - - fn update_deck( - &self, - input: anki_proto::decks::Deck, - ) -> Result { - self.with_col(|col| { - let mut deck = Deck::try_from(input)?; - col.update_deck(&mut deck).map(Into::into) - }) - } - - fn update_deck_legacy( - &self, - input: generic::Json, - ) -> Result { - self.with_col(|col| { - let deck: DeckSchema11 = serde_json::from_slice(&input.json)?; - let mut deck = deck.into(); - col.update_deck(&mut deck).map(Into::into) - }) - } - - fn get_deck_legacy(&self, input: anki_proto::decks::DeckId) -> Result { - let did = input.into(); - self.with_col(|col| { - let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into(); - serde_json::to_vec(&deck) - .map_err(Into::into) - .map(Into::into) - }) - } - - fn get_deck_names( - &self, - input: anki_proto::decks::GetDeckNamesRequest, - ) -> Result { - self.with_col(|col| { - let names = if input.include_filtered { - col.get_all_deck_names(input.skip_empty_default)? - } else { - col.get_all_normal_deck_names()? 
- }; - Ok(deck_names_to_proto(names)) - }) - } - - fn get_deck_and_child_names( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| { - col.get_deck_and_child_names(input.did.into()) - .map(deck_names_to_proto) - }) - } - - fn new_deck_legacy(&self, input: generic::Bool) -> Result { - let deck = if input.val { - Deck::new_filtered() - } else { - Deck::new_normal() - }; - let schema11: DeckSchema11 = deck.into(); - serde_json::to_vec(&schema11) - .map_err(Into::into) - .map(Into::into) - } - - fn remove_decks( - &self, - input: anki_proto::decks::DeckIds, - ) -> Result { - self.with_col(|col| { - col.remove_decks_and_child_decks( - &input.dids.into_iter().map(DeckId).collect::>(), - ) - }) - .map(Into::into) - } - - fn reparent_decks( - &self, - input: anki_proto::decks::ReparentDecksRequest, - ) -> Result { - let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect(); - let new_parent = if input.new_parent == 0 { - None - } else { - Some(input.new_parent.into()) - }; - self.with_col(|col| col.reparent_decks(&deck_ids, new_parent)) - .map(Into::into) - } - - fn rename_deck( - &self, - input: anki_proto::decks::RenameDeckRequest, - ) -> Result { - self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name)) - .map(Into::into) - } - - fn get_or_create_filtered_deck( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.get_or_create_filtered_deck(input.into())) - .map(Into::into) - } - - fn add_or_update_filtered_deck( - &self, - input: anki_proto::decks::FilteredDeckForUpdate, - ) -> Result { - self.with_col(|col| col.add_or_update_filtered_deck(input.into())) - .map(|out| out.map(i64::from)) - .map(Into::into) - } - - fn filtered_deck_order_labels(&self, _input: generic::Empty) -> Result { - Ok(search_order_labels(&self.tr).into()) - } - - fn set_deck_collapsed( - &self, - input: anki_proto::decks::SetDeckCollapsedRequest, - ) -> Result { - self.with_col(|col| { - 
col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope()) - }) - .map(Into::into) - } - - fn set_current_deck( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.set_current_deck(input.did.into())) - .map(Into::into) - } - - fn get_current_deck(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.get_current_deck()) - .map(|deck| (*deck).clone().into()) - } -} - -impl From for DeckId { - fn from(did: anki_proto::decks::DeckId) -> Self { - DeckId(did.did) - } -} - -impl From for anki_proto::decks::DeckId { - fn from(did: DeckId) -> Self { - anki_proto::decks::DeckId { did: did.0 } - } -} - -impl From for anki_proto::decks::FilteredDeckForUpdate { - fn from(deck: FilteredDeckForUpdate) -> Self { - anki_proto::decks::FilteredDeckForUpdate { - id: deck.id.into(), - name: deck.human_name, - config: Some(deck.config), - } - } -} - -impl From for FilteredDeckForUpdate { - fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self { - FilteredDeckForUpdate { - id: deck.id.into(), - human_name: deck.name, - config: deck.config.unwrap_or_default(), - } - } -} - -impl From for anki_proto::decks::Deck { - fn from(d: Deck) -> Self { - anki_proto::decks::Deck { - id: d.id.0, - name: d.name.human_name(), - mtime_secs: d.mtime_secs.0, - usn: d.usn.0, - common: Some(d.common), - kind: Some(kind_from_inline(d.kind)), - } - } -} - -impl TryFrom for Deck { - type Error = AnkiError; - - fn try_from(d: anki_proto::decks::Deck) -> Result { - Ok(Deck { - id: DeckId(d.id), - name: NativeDeckName::from_human_name(&d.name), - mtime_secs: TimestampSecs(d.mtime_secs), - usn: Usn(d.usn), - common: d.common.unwrap_or_default(), - kind: kind_to_inline(d.kind.or_invalid("missing kind")?), - }) - } -} - -fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind { - match kind { - anki_proto::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal), - anki_proto::decks::deck::Kind::Filtered(filtered) => 
DeckKind::Filtered(filtered), - } -} - -fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind { - match k { - DeckKind::Normal(n) => anki_proto::decks::deck::Kind::Normal(n), - DeckKind::Filtered(f) => anki_proto::decks::deck::Kind::Filtered(f), - } -} - -fn deck_name_to_proto((id, name): (DeckId, String)) -> anki_proto::decks::DeckNameId { - anki_proto::decks::DeckNameId { id: id.0, name } -} - -fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames { - anki_proto::decks::DeckNames { - entries: names.into_iter().map(deck_name_to_proto).collect(), - } -} diff --git a/rslib/src/backend/error.rs b/rslib/src/backend/error.rs index d4b378885..2bf61244d 100644 --- a/rslib/src/backend/error.rs +++ b/rslib/src/backend/error.rs @@ -40,6 +40,7 @@ impl AnkiError { AnkiError::FileIoError { .. } => Kind::IoError, AnkiError::MediaCheckRequired => Kind::InvalidInput, AnkiError::InvalidId => Kind::InvalidInput, + AnkiError::InvalidMethodIndex | AnkiError::InvalidServiceIndex => Kind::InvalidInput, #[cfg(windows)] AnkiError::WindowsError { .. 
} => Kind::OsError, }; diff --git a/rslib/src/backend/i18n.rs b/rslib/src/backend/i18n.rs index f094c4642..f21af7110 100644 --- a/rslib/src/backend/i18n.rs +++ b/rslib/src/backend/i18n.rs @@ -1,77 +1,27 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::collections::HashMap; - use anki_proto::generic; -pub(super) use anki_proto::i18n::i18n_service::Service as I18nService; -use fluent::FluentArgs; -use fluent::FluentValue; +use anki_proto::i18n::FormatTimespanRequest; +use anki_proto::i18n::I18nResourcesRequest; +use anki_proto::i18n::TranslateStringRequest; use super::Backend; +use crate::i18n::service; use crate::prelude::*; -use crate::scheduler::timespan::answer_button_time; -use crate::scheduler::timespan::time_span; -impl I18nService for Backend { - type Error = AnkiError; - - fn translate_string( - &self, - input: anki_proto::i18n::TranslateStringRequest, - ) -> Result { - let args = build_fluent_args(input.args); - - Ok(self - .tr - .translate_via_index( - input.module_index as usize, - input.message_index as usize, - args, - ) - .into()) +// We avoid delegating to collection for these, as tr doesn't require a +// collection lock. 
+impl crate::services::BackendI18nService for Backend { + fn translate_string(&self, input: TranslateStringRequest) -> Result { + service::translate_string(&self.tr, input) } - fn format_timespan( - &self, - input: anki_proto::i18n::FormatTimespanRequest, - ) -> Result { - use anki_proto::i18n::format_timespan_request::Context; - Ok(match input.context() { - Context::Precise => time_span(input.seconds, &self.tr, true), - Context::Intervals => time_span(input.seconds, &self.tr, false), - Context::AnswerButtons => answer_button_time(input.seconds, &self.tr), - } - .into()) + fn format_timespan(&self, input: FormatTimespanRequest) -> Result { + service::format_timespan(&self.tr, input) } - fn i18n_resources( - &self, - input: anki_proto::i18n::I18nResourcesRequest, - ) -> Result { - serde_json::to_vec(&self.tr.resources_for_js(&input.modules)) - .map(Into::into) - .map_err(Into::into) - } -} - -fn build_fluent_args( - input: HashMap, -) -> FluentArgs<'static> { - let mut args = FluentArgs::new(); - for (key, val) in input { - args.set(key, translate_arg_to_fluent_val(&val)); - } - args -} - -fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> { - use anki_proto::i18n::translate_arg_value::Value as V; - match &arg.value { - Some(val) => match val { - V::Str(s) => FluentValue::String(s.to_owned().into()), - V::Number(f) => FluentValue::Number(f.into()), - }, - None => FluentValue::String("".into()), + fn i18n_resources(&self, input: I18nResourcesRequest) -> Result { + service::i18n_resources(&self.tr, input) } } diff --git a/rslib/src/backend/image_occlusion.rs b/rslib/src/backend/image_occlusion.rs deleted file mode 100644 index 152fdff1d..000000000 --- a/rslib/src/backend/image_occlusion.rs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use anki_proto::generic; -pub(super) use 
anki_proto::image_occlusion::imageocclusion_service::Service as ImageOcclusionService; - -use super::Backend; -use crate::prelude::*; - -impl ImageOcclusionService for Backend { - type Error = AnkiError; - - fn get_image_for_occlusion( - &self, - input: anki_proto::image_occlusion::GetImageForOcclusionRequest, - ) -> Result { - self.with_col(|col| col.get_image_for_occlusion(&input.path)) - } - - fn add_image_occlusion_note( - &self, - input: anki_proto::image_occlusion::AddImageOcclusionNoteRequest, - ) -> Result { - self.with_col(|col| { - col.add_image_occlusion_note( - input.notetype_id.into(), - &input.image_path, - &input.occlusions, - &input.header, - &input.back_extra, - input.tags, - ) - }) - .map(Into::into) - } - - fn get_image_occlusion_note( - &self, - input: anki_proto::image_occlusion::GetImageOcclusionNoteRequest, - ) -> Result { - self.with_col(|col| col.get_image_occlusion_note(input.note_id.into())) - } - - fn update_image_occlusion_note( - &self, - input: anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest, - ) -> Result { - self.with_col(|col| { - col.update_image_occlusion_note( - input.note_id.into(), - &input.occlusions, - &input.header, - &input.back_extra, - input.tags, - ) - }) - .map(Into::into) - } - - fn add_image_occlusion_notetype( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| col.add_image_occlusion_notetype()) - .map(Into::into) - } -} diff --git a/rslib/src/backend/import_export.rs b/rslib/src/backend/import_export.rs index 6e1ccb519..5ecf79582 100644 --- a/rslib/src/backend/import_export.rs +++ b/rslib/src/backend/import_export.rs @@ -3,24 +3,16 @@ use std::path::Path; -use anki_proto::generic; -use anki_proto::import_export::export_limit; -pub(super) use anki_proto::import_export::importexport_service::Service as ImportExportService; -use anki_proto::import_export::ExportLimit; - use super::Backend; use crate::import_export::package::import_colpkg; -use crate::import_export::NoteLog; use 
crate::prelude::*; -use crate::search::SearchNode; - -impl ImportExportService for Backend { - type Error = AnkiError; +use crate::services::BackendImportExportService; +impl BackendImportExportService for Backend { fn export_collection_package( &self, input: anki_proto::import_export::ExportCollectionPackageRequest, - ) -> Result { + ) -> Result<()> { self.abort_media_sync_and_wait(); let mut guard = self.lock_open_collection()?; @@ -34,7 +26,7 @@ impl ImportExportService for Backend { fn import_collection_package( &self, input: anki_proto::import_export::ImportCollectionPackageRequest, - ) -> Result { + ) -> Result<()> { let _guard = self.lock_closed_collection()?; import_colpkg( @@ -46,115 +38,4 @@ impl ImportExportService for Backend { ) .map(Into::into) } - - fn import_anki_package( - &self, - input: anki_proto::import_export::ImportAnkiPackageRequest, - ) -> Result { - self.with_col(|col| col.import_apkg(&input.package_path)) - .map(Into::into) - } - - fn export_anki_package( - &self, - input: anki_proto::import_export::ExportAnkiPackageRequest, - ) -> Result { - self.with_col(|col| { - col.export_apkg( - &input.out_path, - SearchNode::from(input.limit.unwrap_or_default()), - input.with_scheduling, - input.with_media, - input.legacy, - None, - ) - }) - .map(Into::into) - } - - fn get_csv_metadata( - &self, - input: anki_proto::import_export::CsvMetadataRequest, - ) -> Result { - let delimiter = input.delimiter.is_some().then(|| input.delimiter()); - self.with_col(|col| { - col.get_csv_metadata( - &input.path, - delimiter, - input.notetype_id.map(Into::into), - input.deck_id.map(Into::into), - input.is_html, - ) - }) - } - - fn import_csv( - &self, - input: anki_proto::import_export::ImportCsvRequest, - ) -> Result { - self.with_col(|col| col.import_csv(&input.path, input.metadata.unwrap_or_default())) - .map(Into::into) - } - - fn export_note_csv( - &self, - input: anki_proto::import_export::ExportNoteCsvRequest, - ) -> Result { - self.with_col(|col| 
col.export_note_csv(input)) - .map(Into::into) - } - - fn export_card_csv( - &self, - input: anki_proto::import_export::ExportCardCsvRequest, - ) -> Result { - self.with_col(|col| { - col.export_card_csv( - &input.out_path, - SearchNode::from(input.limit.unwrap_or_default()), - input.with_html, - ) - }) - .map(Into::into) - } - - fn import_json_file( - &self, - input: generic::String, - ) -> Result { - self.with_col(|col| col.import_json_file(&input.val)) - .map(Into::into) - } - - fn import_json_string( - &self, - input: generic::String, - ) -> Result { - self.with_col(|col| col.import_json_string(&input.val)) - .map(Into::into) - } -} - -impl From> for anki_proto::import_export::ImportResponse { - fn from(output: OpOutput) -> Self { - Self { - changes: Some(output.changes.into()), - log: Some(output.output), - } - } -} - -impl From for SearchNode { - fn from(export_limit: ExportLimit) -> Self { - use export_limit::Limit; - let limit = export_limit - .limit - .unwrap_or(Limit::WholeCollection(generic::Empty {})); - match limit { - Limit::WholeCollection(_) => Self::WholeCollection, - Limit::DeckId(did) => Self::from_deck_id(did, true), - Limit::NoteIds(nids) => Self::from_note_ids(nids.note_ids), - Limit::CardIds(cids) => Self::from_card_ids(cids.cids), - } - } } diff --git a/rslib/src/backend/links.rs b/rslib/src/backend/links.rs deleted file mode 100644 index d19f9a752..000000000 --- a/rslib/src/backend/links.rs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use anki_proto::links::help_page_link_request::HelpPage; -pub(super) use anki_proto::links::links_service::Service as LinksService; - -use super::Backend; -use crate::links::help_page_to_link; -use crate::prelude::*; - -impl LinksService for Backend { - type Error = AnkiError; - - fn help_page_link( - &self, - input: anki_proto::links::HelpPageLinkRequest, - ) -> Result { - 
Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into()) - } -} diff --git a/rslib/src/backend/media.rs b/rslib/src/backend/media.rs deleted file mode 100644 index 49362aff8..000000000 --- a/rslib/src/backend/media.rs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use anki_proto::generic; -pub(super) use anki_proto::media::media_service::Service as MediaService; - -use super::notes::to_i64s; -use super::Backend; -use crate::prelude::*; - -impl MediaService for Backend { - type Error = AnkiError; - - fn check_media(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - col.transact_no_undo(|col| { - let mut checker = col.media_checker()?; - let mut output = checker.check()?; - - let mut report = checker.summarize_output(&mut output); - col.report_media_field_referencing_templates(&mut report)?; - - Ok(anki_proto::media::CheckMediaResponse { - unused: output.unused, - missing: output.missing, - missing_media_notes: to_i64s(output.missing_media_notes), - report, - have_trash: output.trash_count > 0, - }) - }) - }) - } - - fn trash_media_files( - &self, - input: anki_proto::media::TrashMediaFilesRequest, - ) -> Result { - self.with_col(|col| col.media()?.remove_files(&input.fnames)) - .map(Into::into) - } - - fn add_media_file( - &self, - input: anki_proto::media::AddMediaFileRequest, - ) -> Result { - self.with_col(|col| { - Ok(col - .media()? - .add_file(&input.desired_name, &input.data)? 
- .to_string() - .into()) - }) - } - - fn empty_trash(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.media_checker()?.empty_trash()) - .map(Into::into) - } - - fn restore_trash(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.media_checker()?.restore_trash()) - .map(Into::into) - } -} diff --git a/rslib/src/backend/mod.rs b/rslib/src/backend/mod.rs index c7b790f9d..f47672550 100644 --- a/rslib/src/backend/mod.rs +++ b/rslib/src/backend/mod.rs @@ -1,65 +1,30 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -// infallible backend methods still return a result -#![allow(clippy::unnecessary_wraps)] - mod adding; mod ankidroid; -mod card; -mod cardrendering; +mod card_rendering; mod collection; mod config; -mod dbproxy; -mod deckconfig; -mod decks; +pub(crate) mod dbproxy; mod error; mod i18n; -mod image_occlusion; mod import_export; -mod links; -mod media; -mod notes; -mod notetypes; mod ops; -mod scheduler; -mod search; -mod stats; mod sync; -mod tags; use std::result; use std::sync::Arc; use std::sync::Mutex; use std::thread::JoinHandle; -use anki_proto::ServiceIndex; use once_cell::sync::OnceCell; use prost::Message; use tokio::runtime; use tokio::runtime::Runtime; -use self::ankidroid::AnkidroidService; -use self::card::CardsService; -use self::cardrendering::CardRenderingService; -use self::collection::CollectionService; -use self::config::ConfigService; -use self::deckconfig::DeckConfigService; -use self::decks::DecksService; -use self::i18n::I18nService; -use self::image_occlusion::ImageOcclusionService; -use self::import_export::ImportExportService; -use self::links::LinksService; -use self::media::MediaService; -use self::notes::NotesService; -use self::notetypes::NotetypesService; -use self::scheduler::SchedulerService; -use self::search::SearchService; -use self::stats::StatsService; -use self::sync::SyncService; -use 
self::sync::SyncState; -use self::tags::TagsService; use crate::backend::dbproxy::db_command_bytes; +use crate::backend::sync::SyncState; use crate::prelude::*; use crate::progress::AbortHandleSlot; use crate::progress::Progress; @@ -68,7 +33,7 @@ use crate::progress::ThrottlingProgressHandler; pub struct Backend { col: Arc>>, - tr: I18n, + pub(crate) tr: I18n, server: bool, sync_abort: AbortHandleSlot, progress_state: Arc>, @@ -115,47 +80,6 @@ impl Backend { &self.tr } - pub fn run_method( - &self, - service: u32, - method: u32, - input: &[u8], - ) -> result::Result, Vec> { - ServiceIndex::try_from(service) - .or_invalid("invalid service") - .and_then(|service| match service { - ServiceIndex::Ankidroid => AnkidroidService::run_method(self, method, input), - ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input), - ServiceIndex::Decks => DecksService::run_method(self, method, input), - ServiceIndex::Notes => NotesService::run_method(self, method, input), - ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input), - ServiceIndex::Config => ConfigService::run_method(self, method, input), - ServiceIndex::Sync => SyncService::run_method(self, method, input), - ServiceIndex::Tags => TagsService::run_method(self, method, input), - ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input), - ServiceIndex::CardRendering => { - CardRenderingService::run_method(self, method, input) - } - ServiceIndex::Media => MediaService::run_method(self, method, input), - ServiceIndex::Stats => StatsService::run_method(self, method, input), - ServiceIndex::Search => SearchService::run_method(self, method, input), - ServiceIndex::I18n => I18nService::run_method(self, method, input), - ServiceIndex::Links => LinksService::run_method(self, method, input), - ServiceIndex::Collection => CollectionService::run_method(self, method, input), - ServiceIndex::Cards => CardsService::run_method(self, method, input), - 
ServiceIndex::ImportExport => ImportExportService::run_method(self, method, input), - ServiceIndex::ImageOcclusion => { - ImageOcclusionService::run_method(self, method, input) - } - }) - .map_err(|err| { - let backend_err = err.into_protobuf(&self.tr); - let mut bytes = Vec::new(); - backend_err.encode(&mut bytes).unwrap(); - bytes - }) - } - pub fn run_db_command_bytes(&self, input: &[u8]) -> result::Result, Vec> { self.db_command(input).map_err(|err| { let backend_err = err.into_protobuf(&self.tr); @@ -168,7 +92,7 @@ impl Backend { /// If collection is open, run the provided closure while holding /// the mutex. /// If collection is not open, return an error. - fn with_col(&self, func: F) -> Result + pub(crate) fn with_col(&self, func: F) -> Result where F: FnOnce(&mut Collection) -> Result, { diff --git a/rslib/src/backend/notes.rs b/rslib/src/backend/notes.rs deleted file mode 100644 index 57f3d9d80..000000000 --- a/rslib/src/backend/notes.rs +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::collections::HashSet; - -pub(super) use anki_proto::notes::notes_service::Service as NotesService; - -use super::Backend; -use crate::cloze::add_cloze_numbers_in_string; -use crate::prelude::*; - -impl NotesService for Backend { - type Error = AnkiError; - - fn new_note( - &self, - input: anki_proto::notetypes::NotetypeId, - ) -> Result { - let ntid = input.into(); - self.with_col(|col| { - let nt = col.get_notetype(ntid)?.or_not_found(ntid)?; - Ok(nt.new_note().into()) - }) - } - - fn add_note( - &self, - input: anki_proto::notes::AddNoteRequest, - ) -> Result { - self.with_col(|col| { - let mut note: Note = input.note.or_invalid("no note provided")?.into(); - let changes = col.add_note(&mut note, DeckId(input.deck_id))?; - Ok(anki_proto::notes::AddNoteResponse { - note_id: note.id.0, - changes: Some(changes.into()), - }) - }) - } - - fn 
defaults_for_adding( - &self, - input: anki_proto::notes::DefaultsForAddingRequest, - ) -> Result { - self.with_col(|col| { - let home_deck: DeckId = input.home_deck_of_current_review_card.into(); - col.defaults_for_adding(home_deck).map(Into::into) - }) - } - - fn default_deck_for_notetype( - &self, - input: anki_proto::notetypes::NotetypeId, - ) -> Result { - self.with_col(|col| { - Ok(col - .default_deck_for_notetype(input.into())? - .unwrap_or(DeckId(0)) - .into()) - }) - } - - fn update_notes( - &self, - input: anki_proto::notes::UpdateNotesRequest, - ) -> Result { - self.with_col(|col| { - let notes = input - .notes - .into_iter() - .map(Into::into) - .collect::>(); - col.update_notes_maybe_undoable(notes, !input.skip_undo_entry) - }) - .map(Into::into) - } - - fn get_note(&self, input: anki_proto::notes::NoteId) -> Result { - let nid = input.into(); - self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into)) - } - - fn remove_notes( - &self, - input: anki_proto::notes::RemoveNotesRequest, - ) -> Result { - self.with_col(|col| { - if !input.note_ids.is_empty() { - col.remove_notes( - &input - .note_ids - .into_iter() - .map(Into::into) - .collect::>(), - ) - } else { - let nids = col.storage.note_ids_of_cards( - &input - .card_ids - .into_iter() - .map(Into::into) - .collect::>(), - )?; - col.remove_notes(&nids.into_iter().collect::>()) - } - .map(Into::into) - }) - } - - fn cloze_numbers_in_note( - &self, - note: anki_proto::notes::Note, - ) -> Result { - let mut set = HashSet::with_capacity(4); - for field in ¬e.fields { - add_cloze_numbers_in_string(field, &mut set); - } - Ok(anki_proto::notes::ClozeNumbersInNoteResponse { - numbers: set.into_iter().map(|n| n as u32).collect(), - }) - } - - fn after_note_updates( - &self, - input: anki_proto::notes::AfterNoteUpdatesRequest, - ) -> Result { - self.with_col(|col| { - col.after_note_updates( - &to_note_ids(input.nids), - input.generate_cards, - input.mark_notes_modified, - ) - 
.map(Into::into) - }) - } - - fn field_names_for_notes( - &self, - input: anki_proto::notes::FieldNamesForNotesRequest, - ) -> Result { - self.with_col(|col| { - let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect(); - col.storage - .field_names_for_notes(&nids) - .map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields }) - }) - } - - fn note_fields_check( - &self, - input: anki_proto::notes::Note, - ) -> Result { - let note: Note = input.into(); - self.with_col(|col| { - col.note_fields_check(¬e) - .map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 }) - }) - } - - fn cards_of_note( - &self, - input: anki_proto::notes::NoteId, - ) -> Result { - self.with_col(|col| { - col.storage - .all_card_ids_of_note_in_template_order(NoteId(input.nid)) - .map(|v| anki_proto::cards::CardIds { - cids: v.into_iter().map(Into::into).collect(), - }) - }) - } - - fn get_single_notetype_of_notes( - &self, - input: anki_proto::notes::NoteIds, - ) -> Result { - self.with_col(|col| { - col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId)) - .map(Into::into) - }) - } -} - -pub(super) fn to_note_ids(ids: Vec) -> Vec { - ids.into_iter().map(NoteId).collect() -} - -pub(super) fn to_i64s(ids: Vec) -> Vec { - ids.into_iter().map(Into::into).collect() -} - -impl From for NoteId { - fn from(nid: anki_proto::notes::NoteId) -> Self { - NoteId(nid.nid) - } -} - -impl From for anki_proto::notes::NoteId { - fn from(nid: NoteId) -> Self { - anki_proto::notes::NoteId { nid: nid.0 } - } -} diff --git a/rslib/src/backend/scheduler/mod.rs b/rslib/src/backend/scheduler/mod.rs deleted file mode 100644 index 2a3d8afc6..000000000 --- a/rslib/src/backend/scheduler/mod.rs +++ /dev/null @@ -1,284 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -mod answering; -mod states; - -use anki_proto::generic; -use anki_proto::generic::Empty; -use anki_proto::scheduler; 
-pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService; -use anki_proto::scheduler::SchedulingStatesWithContext; -use anki_proto::scheduler::SetSchedulingStatesRequest; - -use super::Backend; -use crate::prelude::*; -use crate::scheduler::new::NewCardDueOrder; -use crate::scheduler::states::CardState; -use crate::scheduler::states::SchedulingStates; -use crate::stats::studied_today; - -impl SchedulerService for Backend { - type Error = AnkiError; - - /// This behaves like _updateCutoff() in older code - it also unburies at - /// the start of a new day. - fn sched_timing_today( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - let timing = col.timing_today()?; - col.unbury_if_day_rolled_over(timing)?; - Ok(timing.into()) - }) - } - - /// Fetch data from DB and return rendered string. - fn studied_today(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.studied_today().map(Into::into)) - } - - /// Message rendering only, for old graphs. 
- fn studied_today_message( - &self, - input: scheduler::StudiedTodayMessageRequest, - ) -> Result { - Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into()) - } - - fn update_stats(&self, input: scheduler::UpdateStatsRequest) -> Result { - self.with_col(|col| { - col.transact_no_undo(|col| { - let today = col.current_due_day(0)?; - let usn = col.usn()?; - col.update_deck_stats(today, usn, input).map(Into::into) - }) - }) - } - - fn extend_limits(&self, input: scheduler::ExtendLimitsRequest) -> Result { - self.with_col(|col| { - col.transact_no_undo(|col| { - let today = col.current_due_day(0)?; - let usn = col.usn()?; - col.extend_limits( - today, - usn, - input.deck_id.into(), - input.new_delta, - input.review_delta, - ) - .map(Into::into) - }) - }) - } - - fn counts_for_deck_today( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.counts_for_deck_today(input.did.into())) - } - - fn congrats_info(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.congrats_info()) - } - - fn restore_buried_and_suspended_cards( - &self, - input: anki_proto::cards::CardIds, - ) -> Result { - let cids: Vec<_> = input.cids.into_iter().map(CardId).collect(); - self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into)) - } - - fn unbury_deck( - &self, - input: scheduler::UnburyDeckRequest, - ) -> Result { - self.with_col(|col| { - col.unbury_deck(input.deck_id.into(), input.mode()) - .map(Into::into) - }) - } - - fn bury_or_suspend_cards( - &self, - input: scheduler::BuryOrSuspendCardsRequest, - ) -> Result { - self.with_col(|col| { - let mode = input.mode(); - let cids = if input.card_ids.is_empty() { - col.storage - .card_ids_of_notes(&input.note_ids.into_newtype(NoteId))? 
- } else { - input.card_ids.into_newtype(CardId) - }; - col.bury_or_suspend_cards(&cids, mode).map(Into::into) - }) - } - - fn empty_filtered_deck( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into)) - } - - fn rebuild_filtered_deck( - &self, - input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into)) - } - - fn schedule_cards_as_new( - &self, - input: scheduler::ScheduleCardsAsNewRequest, - ) -> Result { - self.with_col(|col| { - let cids = input.card_ids.into_newtype(CardId); - col.reschedule_cards_as_new( - &cids, - input.log, - input.restore_position, - input.reset_counts, - input - .context - .and_then(scheduler::schedule_cards_as_new_request::Context::from_i32), - ) - .map(Into::into) - }) - } - - fn schedule_cards_as_new_defaults( - &self, - input: scheduler::ScheduleCardsAsNewDefaultsRequest, - ) -> Result { - self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context()))) - } - - fn set_due_date( - &self, - input: scheduler::SetDueDateRequest, - ) -> Result { - let config = input.config_key.map(|v| v.key().into()); - let days = input.days; - let cids = input.card_ids.into_newtype(CardId); - self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into)) - } - - fn sort_cards( - &self, - input: scheduler::SortCardsRequest, - ) -> Result { - let cids = input.card_ids.into_newtype(CardId); - let (start, step, random, shift) = ( - input.starting_from, - input.step_size, - input.randomize, - input.shift_existing, - ); - let order = if random { - NewCardDueOrder::Random - } else { - NewCardDueOrder::Preserve - }; - self.with_col(|col| { - col.sort_cards(&cids, start, step, order, shift) - .map(Into::into) - }) - } - - fn reposition_defaults( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| Ok(col.reposition_defaults())) - } - - fn sort_deck( - &self, 
- input: scheduler::SortDeckRequest, - ) -> Result { - self.with_col(|col| { - col.sort_deck_legacy(input.deck_id.into(), input.randomize) - .map(Into::into) - }) - } - - fn get_scheduling_states( - &self, - input: anki_proto::cards::CardId, - ) -> Result { - let cid: CardId = input.into(); - self.with_col(|col| col.get_scheduling_states(cid)) - .map(Into::into) - } - - fn describe_next_states( - &self, - input: scheduler::SchedulingStates, - ) -> Result { - let states: SchedulingStates = input.into(); - self.with_col(|col| col.describe_next_states(states)) - .map(Into::into) - } - - fn state_is_leech(&self, input: scheduler::SchedulingState) -> Result { - let state: CardState = input.into(); - Ok(state.leeched().into()) - } - - fn answer_card( - &self, - input: scheduler::CardAnswer, - ) -> Result { - self.with_col(|col| col.answer_card(&mut input.into())) - .map(Into::into) - } - - fn upgrade_scheduler(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler())) - .map(Into::into) - } - - fn get_queued_cards( - &self, - input: scheduler::GetQueuedCardsRequest, - ) -> Result { - self.with_col(|col| { - col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only) - .map(Into::into) - }) - } - - fn custom_study( - &self, - input: scheduler::CustomStudyRequest, - ) -> Result { - self.with_col(|col| col.custom_study(input)).map(Into::into) - } - - fn custom_study_defaults( - &self, - input: scheduler::CustomStudyDefaultsRequest, - ) -> Result { - self.with_col(|col| col.custom_study_defaults(input.deck_id.into())) - } - - fn get_scheduling_states_with_context( - &self, - _input: Empty, - ) -> std::result::Result { - invalid_input!("the frontend should implement this") - } - - fn set_scheduling_states( - &self, - _input: SetSchedulingStatesRequest, - ) -> std::result::Result { - invalid_input!("the frontend should implement this") - } -} diff --git a/rslib/src/backend/sync/mod.rs 
b/rslib/src/backend/sync.rs similarity index 96% rename from rslib/src/backend/sync/mod.rs rename to rslib/src/backend/sync.rs index 6ffd7b81a..3e781a855 100644 --- a/rslib/src/backend/sync/mod.rs +++ b/rslib/src/backend/sync.rs @@ -3,8 +3,6 @@ use std::sync::Arc; -use anki_proto::generic; -pub(super) use anki_proto::sync::sync_service::Service as SyncService; use anki_proto::sync::sync_status_response::Required; use anki_proto::sync::SyncStatusResponse; use futures::future::AbortHandle; @@ -99,27 +97,25 @@ impl TryFrom for SyncAuth { } } -impl SyncService for Backend { - type Error = AnkiError; - - fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result { +impl crate::services::BackendSyncService for Backend { + fn sync_media(&self, input: anki_proto::sync::SyncAuth) -> Result<()> { self.sync_media_inner(input).map(Into::into) } - fn abort_sync(&self, _input: generic::Empty) -> Result { + fn abort_sync(&self) -> Result<()> { if let Some(handle) = self.sync_abort.lock().unwrap().take() { handle.abort(); } - Ok(().into()) + Ok(()) } /// Abort the media sync. Does not wait for completion. 
- fn abort_media_sync(&self, _input: generic::Empty) -> Result { + fn abort_media_sync(&self) -> Result<()> { let guard = self.state.lock().unwrap(); if let Some(handle) = &guard.sync.media_sync_abort { handle.abort(); } - Ok(().into()) + Ok(()) } fn sync_login( @@ -143,14 +139,14 @@ impl SyncService for Backend { self.sync_collection_inner(input) } - fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result { + fn full_upload(&self, input: anki_proto::sync::SyncAuth) -> Result<()> { self.full_sync_inner(input, true)?; - Ok(().into()) + Ok(()) } - fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result { + fn full_download(&self, input: anki_proto::sync::SyncAuth) -> Result<()> { self.full_sync_inner(input, false)?; - Ok(().into()) + Ok(()) } } diff --git a/rslib/src/backend/tags.rs b/rslib/src/backend/tags.rs deleted file mode 100644 index d3db5d144..000000000 --- a/rslib/src/backend/tags.rs +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use anki_proto::generic; -pub(super) use anki_proto::tags::tags_service::Service as TagsService; - -use super::notes::to_note_ids; -use super::Backend; -use crate::prelude::*; - -impl TagsService for Backend { - type Error = AnkiError; - - fn clear_unused_tags( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| col.clear_unused_tags().map(Into::into)) - } - - fn all_tags(&self, _input: generic::Empty) -> Result { - Ok(generic::StringList { - vals: self.with_col(|col| { - Ok(col - .storage - .all_tags()? 
- .into_iter() - .map(|t| t.name) - .collect()) - })?, - }) - } - - fn remove_tags( - &self, - tags: generic::String, - ) -> Result { - self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into)) - } - - fn set_tag_collapsed( - &self, - input: anki_proto::tags::SetTagCollapsedRequest, - ) -> Result { - self.with_col(|col| { - col.set_tag_collapsed(&input.name, input.collapsed) - .map(Into::into) - }) - } - - fn tag_tree(&self, _input: generic::Empty) -> Result { - self.with_col(|col| col.tag_tree()) - } - - fn reparent_tags( - &self, - input: anki_proto::tags::ReparentTagsRequest, - ) -> Result { - let source_tags = input.tags; - let target_tag = if input.new_parent.is_empty() { - None - } else { - Some(input.new_parent) - }; - self.with_col(|col| col.reparent_tags(&source_tags, target_tag)) - .map(Into::into) - } - - fn rename_tags( - &self, - input: anki_proto::tags::RenameTagsRequest, - ) -> Result { - self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix)) - .map(Into::into) - } - - fn add_note_tags( - &self, - input: anki_proto::tags::NoteIdsAndTagsRequest, - ) -> Result { - self.with_col(|col| { - col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags) - .map(Into::into) - }) - } - - fn remove_note_tags( - &self, - input: anki_proto::tags::NoteIdsAndTagsRequest, - ) -> Result { - self.with_col(|col| { - col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags) - .map(Into::into) - }) - } - - fn find_and_replace_tag( - &self, - input: anki_proto::tags::FindAndReplaceTagRequest, - ) -> Result { - self.with_col(|col| { - let note_ids = if input.note_ids.is_empty() { - col.search_notes_unordered("")? 
- } else { - to_note_ids(input.note_ids) - }; - col.find_and_replace_tag( - ¬e_ids, - &input.search, - &input.replacement, - input.regex, - input.match_case, - ) - .map(Into::into) - }) - } - - fn complete_tag( - &self, - input: anki_proto::tags::CompleteTagRequest, - ) -> Result { - self.with_col(|col| { - let tags = col.complete_tag(&input.input, input.match_limit as usize)?; - Ok(anki_proto::tags::CompleteTagResponse { tags }) - }) - } -} diff --git a/rslib/src/card/mod.rs b/rslib/src/card/mod.rs index 0d739be81..65898d638 100644 --- a/rslib/src/card/mod.rs +++ b/rslib/src/card/mod.rs @@ -1,6 +1,7 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +mod service; pub(crate) mod undo; use std::collections::hash_map::Entry; diff --git a/rslib/src/backend/card.rs b/rslib/src/card/service.rs similarity index 58% rename from rslib/src/backend/card.rs rename to rslib/src/card/service.rs index 84e849418..16a65112a 100644 --- a/rslib/src/backend/card.rs +++ b/rslib/src/card/service.rs @@ -1,84 +1,83 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub(super) use anki_proto::cards::cards_service::Service as CardsService; -use anki_proto::generic; - -use super::Backend; +use crate::card::Card; +use crate::card::CardId; use crate::card::CardQueue; use crate::card::CardType; -use crate::prelude::*; +use crate::collection::Collection; +use crate::decks::DeckId; +use crate::error; +use crate::error::AnkiError; +use crate::error::OrInvalid; +use crate::error::OrNotFound; +use crate::notes::NoteId; +use crate::prelude::TimestampSecs; +use crate::prelude::Usn; -impl CardsService for Backend { - type Error = AnkiError; - - fn get_card(&self, input: anki_proto::cards::CardId) -> Result { +impl crate::services::CardsService for Collection { + fn get_card( + &mut self, + input: anki_proto::cards::CardId, + ) -> 
error::Result { let cid = input.into(); - self.with_col(|col| { - col.storage - .get_card(cid) - .and_then(|opt| opt.or_not_found(cid)) - .map(Into::into) - }) + + self.storage + .get_card(cid) + .and_then(|opt| opt.or_not_found(cid)) + .map(Into::into) } fn update_cards( - &self, + &mut self, input: anki_proto::cards::UpdateCardsRequest, - ) -> Result { - self.with_col(|col| { - let cards = input - .cards - .into_iter() - .map(TryInto::try_into) - .collect::, AnkiError>>()?; - for card in &cards { - card.validate_custom_data()?; - } - col.update_cards_maybe_undoable(cards, !input.skip_undo_entry) - }) - .map(Into::into) + ) -> error::Result { + let cards = input + .cards + .into_iter() + .map(TryInto::try_into) + .collect::, AnkiError>>()?; + for card in &cards { + card.validate_custom_data()?; + } + self.update_cards_maybe_undoable(cards, !input.skip_undo_entry) + .map(Into::into) } - fn remove_cards(&self, input: anki_proto::cards::RemoveCardsRequest) -> Result { - self.with_col(|col| { - col.transact_no_undo(|col| { - col.remove_cards_and_orphaned_notes( - &input - .card_ids - .into_iter() - .map(Into::into) - .collect::>(), - )?; - Ok(().into()) - }) + fn remove_cards(&mut self, input: anki_proto::cards::RemoveCardsRequest) -> error::Result<()> { + self.transact_no_undo(|col| { + col.remove_cards_and_orphaned_notes( + &input + .card_ids + .into_iter() + .map(Into::into) + .collect::>(), + )?; + Ok(()) }) } fn set_deck( - &self, + &mut self, input: anki_proto::cards::SetDeckRequest, - ) -> Result { + ) -> error::Result { let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect(); let deck_id = input.deck_id.into(); - self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into)) + self.set_deck(&cids, deck_id).map(Into::into) } fn set_flag( - &self, + &mut self, input: anki_proto::cards::SetFlagRequest, - ) -> Result { - self.with_col(|col| { - col.set_card_flag(&to_card_ids(input.card_ids), input.flag) - .map(Into::into) - }) + ) -> error::Result { 
+ self.set_card_flag(&to_card_ids(input.card_ids), input.flag) + .map(Into::into) } } impl TryFrom for Card { type Error = AnkiError; - fn try_from(c: anki_proto::cards::Card) -> Result { + fn try_from(c: anki_proto::cards::Card) -> error::Result { let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?; let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?; Ok(Card { diff --git a/rslib/src/card_rendering/mod.rs b/rslib/src/card_rendering/mod.rs index 350f76afa..9194503a1 100644 --- a/rslib/src/card_rendering/mod.rs +++ b/rslib/src/card_rendering/mod.rs @@ -6,6 +6,7 @@ use std::collections::HashMap; use crate::prelude::*; mod parser; +pub(crate) mod service; pub mod tts; mod writer; diff --git a/rslib/src/backend/cardrendering.rs b/rslib/src/card_rendering/service.rs similarity index 71% rename from rslib/src/backend/cardrendering.rs rename to rslib/src/card_rendering/service.rs index d249a3081..fe4bfe7ed 100644 --- a/rslib/src/backend/cardrendering.rs +++ b/rslib/src/card_rendering/service.rs @@ -1,22 +1,23 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -pub(super) use anki_proto::card_rendering::cardrendering_service::Service as CardRenderingService; use anki_proto::card_rendering::ExtractClozeForTypingRequest; use anki_proto::generic; -use super::Backend; +use crate::card::CardId; use crate::card_rendering::extract_av_tags; use crate::card_rendering::strip_av_tags; use crate::card_rendering::tts; use crate::cloze::extract_cloze_for_typing; +use crate::collection::Collection; +use crate::error::OrInvalid; +use crate::error::Result; use crate::latex::extract_latex; use crate::latex::extract_latex_expanding_clozes; use crate::latex::ExtractedLatex; use crate::markdown::render_markdown; use crate::notetype::CardTemplateSchema11; use crate::notetype::RenderCardOutput; -use crate::prelude::*; use crate::template::RenderedNode; use 
crate::text::decode_iri_paths; use crate::text::encode_iri_paths; @@ -25,14 +26,15 @@ use crate::text::strip_html; use crate::text::strip_html_preserving_media_filenames; use crate::typeanswer::compare_answer; -impl CardRenderingService for Backend { - type Error = AnkiError; - +/// While the majority of these methods do not actually require a collection, +/// they are unlikely to be executed without one, so we only bother implementing +/// them for the collection. +impl crate::services::CardRenderingService for Collection { fn extract_av_tags( - &self, + &mut self, input: anki_proto::card_rendering::ExtractAvTagsRequest, ) -> Result { - let out = extract_av_tags(input.text, input.question_side, self.i18n()); + let out = extract_av_tags(input.text, input.question_side, &self.tr); Ok(anki_proto::card_rendering::ExtractAvTagsResponse { text: out.0, av_tags: out.1, @@ -40,7 +42,7 @@ impl CardRenderingService for Backend { } fn extract_latex( - &self, + &mut self, input: anki_proto::card_rendering::ExtractLatexRequest, ) -> Result { let func = if input.expand_clozes { @@ -64,57 +66,49 @@ impl CardRenderingService for Backend { }) } - fn get_empty_cards( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - let mut empty = col.empty_cards()?; - let report = col.empty_cards_report(&mut empty)?; + fn get_empty_cards(&mut self) -> Result { + let mut empty = self.empty_cards()?; + let report = self.empty_cards_report(&mut empty)?; - let mut outnotes = vec![]; - for (_ntid, notes) in empty { - outnotes.extend(notes.into_iter().map(|e| { - anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards { - note_id: e.nid.0, - will_delete_note: e.empty.len() == e.current_count, - card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(), - } - })) - } - Ok(anki_proto::card_rendering::EmptyCardsReport { - report, - notes: outnotes, - }) + let mut outnotes = vec![]; + for (_ntid, notes) in empty { + outnotes.extend(notes.into_iter().map(|e| { + 
anki_proto::card_rendering::empty_cards_report::NoteWithEmptyCards { + note_id: e.nid.0, + will_delete_note: e.empty.len() == e.current_count, + card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(), + } + })) + } + Ok(anki_proto::card_rendering::EmptyCardsReport { + report, + notes: outnotes, }) } fn render_existing_card( - &self, + &mut self, input: anki_proto::card_rendering::RenderExistingCardRequest, ) -> Result { - self.with_col(|col| { - col.render_existing_card(CardId(input.card_id), input.browser) - .map(Into::into) - }) + self.render_existing_card(CardId(input.card_id), input.browser) + .map(Into::into) } fn render_uncommitted_card( - &self, + &mut self, input: anki_proto::card_rendering::RenderUncommittedCardRequest, ) -> Result { let template = input.template.or_invalid("missing template")?.into(); let mut note = input.note.or_invalid("missing note")?.into(); let ord = input.card_ord as u16; let fill_empty = input.fill_empty; - self.with_col(|col| { - col.render_uncommitted_card(&mut note, &template, ord, fill_empty) - .map(Into::into) - }) + + self.render_uncommitted_card(&mut note, &template, ord, fill_empty) + .map(Into::into) } fn render_uncommitted_card_legacy( - &self, + &mut self, input: anki_proto::card_rendering::RenderUncommittedCardLegacyRequest, ) -> Result { let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?; @@ -122,18 +116,17 @@ impl CardRenderingService for Backend { let mut note = input.note.or_invalid("missing note")?.into(); let ord = input.card_ord as u16; let fill_empty = input.fill_empty; - self.with_col(|col| { - col.render_uncommitted_card(&mut note, &template, ord, fill_empty) - .map(Into::into) - }) + + self.render_uncommitted_card(&mut note, &template, ord, fill_empty) + .map(Into::into) } - fn strip_av_tags(&self, input: generic::String) -> Result { + fn strip_av_tags(&mut self, input: generic::String) -> Result { Ok(strip_av_tags(input.val).into()) } fn render_markdown( - &self, + &mut 
self, input: anki_proto::card_rendering::RenderMarkdownRequest, ) -> Result { let mut text = render_markdown(&input.markdown); @@ -144,37 +137,30 @@ impl CardRenderingService for Backend { Ok(text.into()) } - fn encode_iri_paths(&self, input: generic::String) -> Result { + fn encode_iri_paths(&mut self, input: generic::String) -> Result { Ok(encode_iri_paths(&input.val).to_string().into()) } - fn decode_iri_paths(&self, input: generic::String) -> Result { + fn decode_iri_paths(&mut self, input: generic::String) -> Result { Ok(decode_iri_paths(&input.val).to_string().into()) } fn strip_html( - &self, + &mut self, input: anki_proto::card_rendering::StripHtmlRequest, ) -> Result { - Ok(match input.mode() { - anki_proto::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text), - anki_proto::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => { - strip_html_preserving_media_filenames(&input.text) - } - } - .to_string() - .into()) + strip_html_proto(input) } fn compare_answer( - &self, + &mut self, input: anki_proto::card_rendering::CompareAnswerRequest, ) -> Result { Ok(compare_answer(&input.expected, &input.provided).into()) } fn extract_cloze_for_typing( - &self, + &mut self, input: ExtractClozeForTypingRequest, ) -> Result { Ok(extract_cloze_for_typing(&input.text, input.ordinal as u16) @@ -183,7 +169,7 @@ impl CardRenderingService for Backend { } fn all_tts_voices( - &self, + &mut self, input: anki_proto::card_rendering::AllTtsVoicesRequest, ) -> Result { tts::all_voices(input.validate) @@ -191,9 +177,9 @@ impl CardRenderingService for Backend { } fn write_tts_stream( - &self, + &mut self, request: anki_proto::card_rendering::WriteTtsStreamRequest, - ) -> Result { + ) -> Result<()> { tts::write_stream( &request.path, &request.voice_id, @@ -246,3 +232,16 @@ impl From for anki_proto::card_rendering::RenderCardResponse { } } } + +pub(crate) fn strip_html_proto( + input: anki_proto::card_rendering::StripHtmlRequest, +) -> Result { + 
Ok(match input.mode() { + anki_proto::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text), + anki_proto::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => { + strip_html_preserving_media_filenames(&input.text) + } + } + .to_string() + .into()) +} diff --git a/rslib/src/deckconfig/mod.rs b/rslib/src/deckconfig/mod.rs index 785f8085b..9b6f45f60 100644 --- a/rslib/src/deckconfig/mod.rs +++ b/rslib/src/deckconfig/mod.rs @@ -2,6 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html mod schema11; +mod service; pub(crate) mod undo; mod update; diff --git a/rslib/src/backend/deckconfig.rs b/rslib/src/deckconfig/service.rs similarity index 58% rename from rslib/src/backend/deckconfig.rs rename to rslib/src/deckconfig/service.rs index 4d11b9f93..438098321 100644 --- a/rslib/src/backend/deckconfig.rs +++ b/rslib/src/deckconfig/service.rs @@ -1,92 +1,85 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub(super) use anki_proto::deckconfig::deckconfig_service::Service as DeckConfigService; use anki_proto::generic; -use super::Backend; +use crate::collection::Collection; use crate::deckconfig::DeckConfSchema11; use crate::deckconfig::DeckConfig; +use crate::deckconfig::DeckConfigId; use crate::deckconfig::UpdateDeckConfigsRequest; -use crate::prelude::*; - -impl DeckConfigService for Backend { - type Error = AnkiError; +use crate::error; +impl crate::services::DeckConfigService for Collection { fn add_or_update_deck_config_legacy( - &self, + &mut self, input: generic::Json, - ) -> Result { + ) -> error::Result { let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?; let mut conf: DeckConfig = conf.into(); - self.with_col(|col| { - col.transact_no_undo(|col| { - col.add_or_update_deck_config_legacy(&mut conf)?; - Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 }) - }) + + self.transact_no_undo(|col| 
{ + col.add_or_update_deck_config_legacy(&mut conf)?; + Ok(anki_proto::deckconfig::DeckConfigId { dcid: conf.id.0 }) }) .map(Into::into) } - fn all_deck_config_legacy(&self, _input: generic::Empty) -> Result { - self.with_col(|col| { - let conf: Vec = col - .storage - .all_deck_config()? - .into_iter() - .map(Into::into) - .collect(); - serde_json::to_vec(&conf).map_err(Into::into) - }) - .map(Into::into) + fn all_deck_config_legacy(&mut self) -> error::Result { + let conf: Vec = self + .storage + .all_deck_config()? + .into_iter() + .map(Into::into) + .collect(); + serde_json::to_vec(&conf) + .map_err(Into::into) + .map(Into::into) } fn get_deck_config( - &self, + &mut self, input: anki_proto::deckconfig::DeckConfigId, - ) -> Result { - self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into())) + ) -> error::Result { + Ok(Collection::get_deck_config(self, input.into(), true)? + .unwrap() + .into()) } fn get_deck_config_legacy( - &self, + &mut self, input: anki_proto::deckconfig::DeckConfigId, - ) -> Result { - self.with_col(|col| { - let conf = col.get_deck_config(input.into(), true)?.unwrap(); - let conf: DeckConfSchema11 = conf.into(); - Ok(serde_json::to_vec(&conf)?) 
- }) - .map(Into::into) + ) -> error::Result { + let conf = Collection::get_deck_config(self, input.into(), true)?.unwrap(); + let conf: DeckConfSchema11 = conf.into(); + Ok(serde_json::to_vec(&conf)?).map(Into::into) } - fn new_deck_config_legacy(&self, _input: generic::Empty) -> Result { + fn new_deck_config_legacy(&mut self) -> error::Result { serde_json::to_vec(&DeckConfSchema11::default()) .map_err(Into::into) .map(Into::into) } fn remove_deck_config( - &self, + &mut self, input: anki_proto::deckconfig::DeckConfigId, - ) -> Result { - self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into()))) + ) -> error::Result<()> { + self.transact_no_undo(|col| col.remove_deck_config_inner(input.into())) .map(Into::into) } fn get_deck_configs_for_update( - &self, + &mut self, input: anki_proto::decks::DeckId, - ) -> Result { - self.with_col(|col| col.get_deck_configs_for_update(input.did.into())) + ) -> error::Result { + self.get_deck_configs_for_update(input.did.into()) } fn update_deck_configs( - &self, + &mut self, input: anki_proto::deckconfig::UpdateDeckConfigsRequest, - ) -> Result { - self.with_col(|col| col.update_deck_configs(input.into())) - .map(Into::into) + ) -> error::Result { + self.update_deck_configs(input.into()).map(Into::into) } } diff --git a/rslib/src/decks/mod.rs b/rslib/src/decks/mod.rs index 25713092c..f33f3b879 100644 --- a/rslib/src/decks/mod.rs +++ b/rslib/src/decks/mod.rs @@ -10,6 +10,7 @@ mod name; mod remove; mod reparent; mod schema11; +mod service; mod stats; pub mod tree; pub(crate) mod undo; diff --git a/rslib/src/decks/service.rs b/rslib/src/decks/service.rs new file mode 100644 index 000000000..51265e2d7 --- /dev/null +++ b/rslib/src/decks/service.rs @@ -0,0 +1,317 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::decks::deck::kind_container::Kind as DeckKind; +use anki_proto::generic; + +use 
crate::collection::Collection; +use crate::decks::filtered::search_order_labels; +use crate::decks::Deck; +use crate::decks::DeckId; +use crate::decks::DeckSchema11; +use crate::decks::NativeDeckName; +use crate::error; +use crate::error::AnkiError; +use crate::error::OrInvalid; +use crate::error::OrNotFound; +use crate::prelude::TimestampSecs; +use crate::prelude::Usn; +use crate::scheduler::filtered::FilteredDeckForUpdate; + +impl crate::services::DecksService for Collection { + fn new_deck(&mut self) -> error::Result { + Ok(Deck::new_normal().into()) + } + + fn add_deck( + &mut self, + deck: anki_proto::decks::Deck, + ) -> error::Result { + let mut deck: Deck = deck.try_into()?; + Ok(self.add_deck(&mut deck)?.map(|_| deck.id.0).into()) + } + + fn add_deck_legacy( + &mut self, + input: generic::Json, + ) -> error::Result { + let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?; + let mut deck: Deck = schema11.into(); + + let output = self.add_deck(&mut deck)?; + Ok(output.map(|_| deck.id.0).into()) + } + + fn add_or_update_deck_legacy( + &mut self, + input: anki_proto::decks::AddOrUpdateDeckLegacyRequest, + ) -> error::Result { + let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?; + let mut deck: Deck = schema11.into(); + if input.preserve_usn_and_mtime { + self.transact_no_undo(|col| { + let usn = col.usn()?; + col.add_or_update_single_deck_with_existing_id(&mut deck, usn) + })?; + } else { + self.add_or_update_deck(&mut deck)?; + } + Ok(anki_proto::decks::DeckId { did: deck.id.0 }) + } + + fn deck_tree( + &mut self, + input: anki_proto::decks::DeckTreeRequest, + ) -> error::Result { + let now = if input.now == 0 { + None + } else { + Some(TimestampSecs(input.now)) + }; + self.deck_tree(now) + } + + fn deck_tree_legacy(&mut self) -> error::Result { + let tree = self.legacy_deck_tree()?; + serde_json::to_vec(&tree) + .map_err(Into::into) + .map(Into::into) + } + + fn get_all_decks_legacy(&mut self) -> error::Result { + let decks = 
self.storage.get_all_decks_as_schema11()?; + serde_json::to_vec(&decks) + .map_err(Into::into) + .map(Into::into) + } + + fn get_deck_id_by_name( + &mut self, + input: generic::String, + ) -> error::Result { + self.get_deck_id(&input.val).and_then(|d| { + d.or_not_found(input.val) + .map(|d| anki_proto::decks::DeckId { did: d.0 }) + }) + } + + fn get_deck( + &mut self, + input: anki_proto::decks::DeckId, + ) -> error::Result { + let did = input.into(); + Ok(self.storage.get_deck(did)?.or_not_found(did)?.into()) + } + + fn update_deck( + &mut self, + input: anki_proto::decks::Deck, + ) -> error::Result { + let mut deck = Deck::try_from(input)?; + self.update_deck(&mut deck).map(Into::into) + } + + fn update_deck_legacy( + &mut self, + input: generic::Json, + ) -> error::Result { + let deck: DeckSchema11 = serde_json::from_slice(&input.json)?; + let mut deck = deck.into(); + self.update_deck(&mut deck).map(Into::into) + } + + fn get_deck_legacy( + &mut self, + input: anki_proto::decks::DeckId, + ) -> error::Result { + let did = input.into(); + + let deck: DeckSchema11 = self.storage.get_deck(did)?.or_not_found(did)?.into(); + serde_json::to_vec(&deck) + .map_err(Into::into) + .map(Into::into) + } + + fn get_deck_names( + &mut self, + input: anki_proto::decks::GetDeckNamesRequest, + ) -> error::Result { + let names = if input.include_filtered { + self.get_all_deck_names(input.skip_empty_default)? + } else { + self.get_all_normal_deck_names()? 
+ }; + Ok(deck_names_to_proto(names)) + } + + fn get_deck_and_child_names( + &mut self, + input: anki_proto::decks::DeckId, + ) -> error::Result { + Collection::get_deck_and_child_names(self, input.did.into()).map(deck_names_to_proto) + } + + fn new_deck_legacy(&mut self, input: generic::Bool) -> error::Result { + let deck = if input.val { + Deck::new_filtered() + } else { + Deck::new_normal() + }; + let schema11: DeckSchema11 = deck.into(); + serde_json::to_vec(&schema11) + .map_err(Into::into) + .map(Into::into) + } + + fn remove_decks( + &mut self, + input: anki_proto::decks::DeckIds, + ) -> error::Result { + self.remove_decks_and_child_decks(&input.dids.into_iter().map(DeckId).collect::>()) + .map(Into::into) + } + + fn reparent_decks( + &mut self, + input: anki_proto::decks::ReparentDecksRequest, + ) -> error::Result { + let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect(); + let new_parent = if input.new_parent == 0 { + None + } else { + Some(input.new_parent.into()) + }; + self.reparent_decks(&deck_ids, new_parent).map(Into::into) + } + + fn rename_deck( + &mut self, + input: anki_proto::decks::RenameDeckRequest, + ) -> error::Result { + self.rename_deck(input.deck_id.into(), &input.new_name) + .map(Into::into) + } + + fn get_or_create_filtered_deck( + &mut self, + input: anki_proto::decks::DeckId, + ) -> error::Result { + self.get_or_create_filtered_deck(input.into()) + .map(Into::into) + } + + fn add_or_update_filtered_deck( + &mut self, + input: anki_proto::decks::FilteredDeckForUpdate, + ) -> error::Result { + self.add_or_update_filtered_deck(input.into()) + .map(|out| out.map(i64::from)) + .map(Into::into) + } + + fn filtered_deck_order_labels(&mut self) -> error::Result { + Ok(search_order_labels(&self.tr).into()) + } + + fn set_deck_collapsed( + &mut self, + input: anki_proto::decks::SetDeckCollapsedRequest, + ) -> error::Result { + self.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope()) + .map(Into::into) 
+ } + + fn set_current_deck( + &mut self, + input: anki_proto::decks::DeckId, + ) -> error::Result { + self.set_current_deck(input.did.into()).map(Into::into) + } + + fn get_current_deck(&mut self) -> error::Result { + self.get_current_deck().map(|deck| (*deck).clone().into()) + } +} + +impl From for DeckId { + fn from(did: anki_proto::decks::DeckId) -> Self { + DeckId(did.did) + } +} + +impl From for anki_proto::decks::DeckId { + fn from(did: DeckId) -> Self { + anki_proto::decks::DeckId { did: did.0 } + } +} + +impl From for anki_proto::decks::FilteredDeckForUpdate { + fn from(deck: FilteredDeckForUpdate) -> Self { + anki_proto::decks::FilteredDeckForUpdate { + id: deck.id.into(), + name: deck.human_name, + config: Some(deck.config), + } + } +} + +impl From for FilteredDeckForUpdate { + fn from(deck: anki_proto::decks::FilteredDeckForUpdate) -> Self { + FilteredDeckForUpdate { + id: deck.id.into(), + human_name: deck.name, + config: deck.config.unwrap_or_default(), + } + } +} + +impl From for anki_proto::decks::Deck { + fn from(d: Deck) -> Self { + anki_proto::decks::Deck { + id: d.id.0, + name: d.name.human_name(), + mtime_secs: d.mtime_secs.0, + usn: d.usn.0, + common: Some(d.common), + kind: Some(kind_from_inline(d.kind)), + } + } +} + +impl TryFrom for Deck { + type Error = AnkiError; + + fn try_from(d: anki_proto::decks::Deck) -> error::Result { + Ok(Deck { + id: DeckId(d.id), + name: NativeDeckName::from_human_name(&d.name), + mtime_secs: TimestampSecs(d.mtime_secs), + usn: Usn(d.usn), + common: d.common.unwrap_or_default(), + kind: kind_to_inline(d.kind.or_invalid("missing kind")?), + }) + } +} + +fn kind_to_inline(kind: anki_proto::decks::deck::Kind) -> DeckKind { + match kind { + anki_proto::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal), + anki_proto::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered), + } +} + +fn kind_from_inline(k: DeckKind) -> anki_proto::decks::deck::Kind { + match k { + DeckKind::Normal(n) => 
anki_proto::decks::deck::Kind::Normal(n), + DeckKind::Filtered(f) => anki_proto::decks::deck::Kind::Filtered(f), + } +} + +fn deck_name_to_proto((id, name): (DeckId, String)) -> anki_proto::decks::DeckNameId { + anki_proto::decks::DeckNameId { id: id.0, name } +} + +fn deck_names_to_proto(names: Vec<(DeckId, String)>) -> anki_proto::decks::DeckNames { + anki_proto::decks::DeckNames { + entries: names.into_iter().map(deck_name_to_proto).collect(), + } +} diff --git a/rslib/src/error/mod.rs b/rslib/src/error/mod.rs index 8a880f0a3..eb0eb02db 100644 --- a/rslib/src/error/mod.rs +++ b/rslib/src/error/mod.rs @@ -13,7 +13,6 @@ pub mod windows; use anki_i18n::I18n; use anki_io::FileIoError; use anki_io::FileOp; -use anki_proto::ProtoError; pub use db::DbError; pub use db::DbErrorKind; pub use filtered::CustomStudyError; @@ -112,6 +111,8 @@ pub enum AnkiError { WindowsError { source: windows::WindowsError, }, + InvalidMethodIndex, + InvalidServiceIndex, } // error helpers @@ -157,6 +158,8 @@ impl AnkiError { | AnkiError::CollectionNotOpen | AnkiError::CollectionAlreadyOpen | AnkiError::Existing + | AnkiError::InvalidServiceIndex + | AnkiError::InvalidMethodIndex | AnkiError::UndoEmpty => format!("{:?}", self), AnkiError::FileIoError { source } => source.message(), AnkiError::InvalidInput { source } => source.message(), @@ -299,11 +302,3 @@ pub enum CardTypeErrorDetails { MissingCloze, ExtraneousCloze, } - -impl From for AnkiError { - fn from(value: ProtoError) -> Self { - AnkiError::ProtoError { - info: value.to_string(), - } - } -} diff --git a/rslib/src/i18n/mod.rs b/rslib/src/i18n/mod.rs new file mode 100644 index 000000000..fa79e1463 --- /dev/null +++ b/rslib/src/i18n/mod.rs @@ -0,0 +1,3 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(crate) mod service; diff --git a/rslib/src/i18n/service.rs b/rslib/src/i18n/service.rs new file mode 100644 index 000000000..8aa8e9bb5 --- 
/dev/null +++ b/rslib/src/i18n/service.rs @@ -0,0 +1,91 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use std::collections::HashMap; + +use anki_i18n::I18n; +use anki_proto::generic; +use anki_proto::generic::Json; +use anki_proto::i18n::format_timespan_request::Context; +use anki_proto::i18n::FormatTimespanRequest; +use anki_proto::i18n::I18nResourcesRequest; +use anki_proto::i18n::TranslateStringRequest; +use fluent_bundle::FluentArgs; +use fluent_bundle::FluentValue; + +use crate::collection::Collection; +use crate::error; +use crate::scheduler::timespan::answer_button_time; +use crate::scheduler::timespan::time_span; + +impl crate::services::I18nService for Collection { + fn translate_string( + &mut self, + input: TranslateStringRequest, + ) -> error::Result { + translate_string(&self.tr, input) + } + + fn format_timespan(&mut self, input: FormatTimespanRequest) -> error::Result { + format_timespan(&self.tr, input) + } + + fn i18n_resources(&mut self, input: I18nResourcesRequest) -> error::Result { + i18n_resources(&self.tr, input) + } +} + +pub(crate) fn translate_string( + tr: &I18n, + input: TranslateStringRequest, +) -> error::Result { + let args = build_fluent_args(input.args); + Ok(tr + .translate_via_index( + input.module_index as usize, + input.message_index as usize, + args, + ) + .into()) +} + +pub(crate) fn format_timespan( + tr: &I18n, + input: FormatTimespanRequest, +) -> error::Result { + Ok(match input.context() { + Context::Precise => time_span(input.seconds, tr, true), + Context::Intervals => time_span(input.seconds, tr, false), + Context::AnswerButtons => answer_button_time(input.seconds, tr), + } + .into()) +} + +pub(crate) fn i18n_resources( + tr: &I18n, + input: I18nResourcesRequest, +) -> error::Result { + serde_json::to_vec(&tr.resources_for_js(&input.modules)) + .map(Into::into) + .map_err(Into::into) +} + +fn build_fluent_args( + input: HashMap, +) 
-> FluentArgs<'static> { + let mut args = FluentArgs::new(); + for (key, val) in input { + args.set(key, translate_arg_to_fluent_val(&val)); + } + args +} + +fn translate_arg_to_fluent_val(arg: &anki_proto::i18n::TranslateArgValue) -> FluentValue<'static> { + use anki_proto::i18n::translate_arg_value::Value as V; + match &arg.value { + Some(val) => match val { + V::Str(s) => FluentValue::String(s.to_owned().into()), + V::Number(f) => FluentValue::Number(f.into()), + }, + None => FluentValue::String("".into()), + } +} diff --git a/rslib/src/image_occlusion/mod.rs b/rslib/src/image_occlusion/mod.rs index 0983198b1..3dc23167b 100644 --- a/rslib/src/image_occlusion/mod.rs +++ b/rslib/src/image_occlusion/mod.rs @@ -4,3 +4,4 @@ pub mod imagedata; pub mod imageocclusion; pub(crate) mod notetype; +mod service; diff --git a/rslib/src/image_occlusion/service.rs b/rslib/src/image_occlusion/service.rs new file mode 100644 index 000000000..308eef0c3 --- /dev/null +++ b/rslib/src/image_occlusion/service.rs @@ -0,0 +1,60 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::image_occlusion::AddImageOcclusionNoteRequest; +use anki_proto::image_occlusion::GetImageForOcclusionRequest; +use anki_proto::image_occlusion::GetImageForOcclusionResponse; +use anki_proto::image_occlusion::GetImageOcclusionNoteRequest; +use anki_proto::image_occlusion::GetImageOcclusionNoteResponse; +use anki_proto::image_occlusion::UpdateImageOcclusionNoteRequest; + +use crate::collection::Collection; +use crate::error; + +impl crate::services::ImageOcclusionService for Collection { + fn get_image_for_occlusion( + &mut self, + input: GetImageForOcclusionRequest, + ) -> error::Result { + self.get_image_for_occlusion(&input.path) + } + + fn add_image_occlusion_note( + &mut self, + input: AddImageOcclusionNoteRequest, + ) -> error::Result { + self.add_image_occlusion_note( + input.notetype_id.into(), + 
&input.image_path, + &input.occlusions, + &input.header, + &input.back_extra, + input.tags, + ) + .map(Into::into) + } + + fn get_image_occlusion_note( + &mut self, + input: GetImageOcclusionNoteRequest, + ) -> error::Result { + self.get_image_occlusion_note(input.note_id.into()) + } + + fn update_image_occlusion_note( + &mut self, + input: UpdateImageOcclusionNoteRequest, + ) -> error::Result { + self.update_image_occlusion_note( + input.note_id.into(), + &input.occlusions, + &input.header, + &input.back_extra, + input.tags, + ) + .map(Into::into) + } + + fn add_image_occlusion_notetype(&mut self) -> error::Result { + self.add_image_occlusion_notetype().map(Into::into) + } +} diff --git a/rslib/src/import_export/mod.rs b/rslib/src/import_export/mod.rs index c3ac029ac..889c5b878 100644 --- a/rslib/src/import_export/mod.rs +++ b/rslib/src/import_export/mod.rs @@ -4,6 +4,7 @@ mod gather; mod insert; pub mod package; +mod service; pub mod text; pub use anki_proto::import_export::import_response::Log as NoteLog; diff --git a/rslib/src/import_export/service.rs b/rslib/src/import_export/service.rs new file mode 100644 index 000000000..2fe89de4f --- /dev/null +++ b/rslib/src/import_export/service.rs @@ -0,0 +1,114 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +use anki_proto::import_export::import_response::Log as NoteLog; +use anki_proto::import_export::ExportLimit; + +use crate::collection::Collection; +use crate::error; +use crate::ops::OpOutput; +use crate::search::SearchNode; + +impl crate::services::ImportExportService for Collection { + fn import_anki_package( + &mut self, + input: anki_proto::import_export::ImportAnkiPackageRequest, + ) -> error::Result { + self.import_apkg(&input.package_path).map(Into::into) + } + + fn export_anki_package( + &mut self, + input: anki_proto::import_export::ExportAnkiPackageRequest, + ) -> error::Result { + 
self.export_apkg( + &input.out_path, + SearchNode::from(input.limit.unwrap_or_default()), + input.with_scheduling, + input.with_media, + input.legacy, + None, + ) + .map(Into::into) + } + + fn get_csv_metadata( + &mut self, + input: anki_proto::import_export::CsvMetadataRequest, + ) -> error::Result { + let delimiter = input.delimiter.is_some().then(|| input.delimiter()); + + self.get_csv_metadata( + &input.path, + delimiter, + input.notetype_id.map(Into::into), + input.deck_id.map(Into::into), + input.is_html, + ) + } + + fn import_csv( + &mut self, + input: anki_proto::import_export::ImportCsvRequest, + ) -> error::Result { + self.import_csv(&input.path, input.metadata.unwrap_or_default()) + .map(Into::into) + } + + fn export_note_csv( + &mut self, + input: anki_proto::import_export::ExportNoteCsvRequest, + ) -> error::Result { + self.export_note_csv(input).map(Into::into) + } + + fn export_card_csv( + &mut self, + input: anki_proto::import_export::ExportCardCsvRequest, + ) -> error::Result { + self.export_card_csv( + &input.out_path, + SearchNode::from(input.limit.unwrap_or_default()), + input.with_html, + ) + .map(Into::into) + } + + fn import_json_file( + &mut self, + input: generic::String, + ) -> error::Result { + self.import_json_file(&input.val).map(Into::into) + } + + fn import_json_string( + &mut self, + input: generic::String, + ) -> error::Result { + self.import_json_string(&input.val).map(Into::into) + } +} + +impl From> for anki_proto::import_export::ImportResponse { + fn from(output: OpOutput) -> Self { + Self { + changes: Some(output.changes.into()), + log: Some(output.output), + } + } +} + +impl From for SearchNode { + fn from(export_limit: ExportLimit) -> Self { + use anki_proto::import_export::export_limit::Limit; + let limit = export_limit + .limit + .unwrap_or(Limit::WholeCollection(generic::Empty {})); + match limit { + Limit::WholeCollection(_) => Self::WholeCollection, + Limit::DeckId(did) => Self::from_deck_id(did, true), + 
Limit::NoteIds(nids) => Self::from_note_ids(nids.note_ids), + Limit::CardIds(cids) => Self::from_card_ids(cids.cids), + } + } +} diff --git a/rslib/src/lib.rs b/rslib/src/lib.rs index d2f4c2b6e..f023d1273 100644 --- a/rslib/src/lib.rs +++ b/rslib/src/lib.rs @@ -4,6 +4,7 @@ #![deny(unused_must_use)] pub mod adding; +pub(crate) mod ankidroid; pub mod backend; pub mod browser_table; pub mod card; @@ -16,6 +17,7 @@ pub mod deckconfig; pub mod decks; pub mod error; pub mod findreplace; +pub mod i18n; pub mod image_occlusion; pub mod import_export; pub mod latex; @@ -33,6 +35,7 @@ pub mod revlog; pub mod scheduler; pub mod search; pub mod serde; +pub mod services; mod stats; pub mod storage; pub mod sync; diff --git a/rslib/src/links.rs b/rslib/src/links.rs index ac4df5ab6..b7b8e2edf 100644 --- a/rslib/src/links.rs +++ b/rslib/src/links.rs @@ -3,6 +3,9 @@ pub use anki_proto::links::help_page_link_request::HelpPage; +use crate::collection::Collection; +use crate::error; + static HELP_SITE: &str = "https://docs.ankiweb.net/"; pub fn help_page_to_link(page: HelpPage) -> String { @@ -40,3 +43,12 @@ pub fn help_page_link_suffix(page: HelpPage) -> &'static str { } } } + +impl crate::services::LinksService for Collection { + fn help_page_link( + &mut self, + input: anki_proto::links::HelpPageLinkRequest, + ) -> error::Result { + Ok(help_page_to_link(HelpPage::from_i32(input.page).unwrap_or(HelpPage::Index)).into()) + } +} diff --git a/rslib/src/media/mod.rs b/rslib/src/media/mod.rs index 39b420c32..dba9989ff 100644 --- a/rslib/src/media/mod.rs +++ b/rslib/src/media/mod.rs @@ -3,6 +3,7 @@ pub mod check; pub mod files; +mod service; use std::borrow::Cow; use std::collections::HashMap; diff --git a/rslib/src/media/service.rs b/rslib/src/media/service.rs new file mode 100644 index 000000000..028a7f842 --- /dev/null +++ b/rslib/src/media/service.rs @@ -0,0 +1,50 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; 
http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; +use anki_proto::media::AddMediaFileRequest; +use anki_proto::media::CheckMediaResponse; +use anki_proto::media::TrashMediaFilesRequest; + +use crate::collection::Collection; +use crate::error; +use crate::notes::service::to_i64s; + +impl crate::services::MediaService for Collection { + fn check_media(&mut self) -> error::Result { + self.transact_no_undo(|col| { + let mut checker = col.media_checker()?; + let mut output = checker.check()?; + + let mut report = checker.summarize_output(&mut output); + col.report_media_field_referencing_templates(&mut report)?; + + Ok(CheckMediaResponse { + unused: output.unused, + missing: output.missing, + missing_media_notes: to_i64s(output.missing_media_notes), + report, + have_trash: output.trash_count > 0, + }) + }) + } + + fn add_media_file(&mut self, input: AddMediaFileRequest) -> error::Result { + Ok(self + .media()? + .add_file(&input.desired_name, &input.data)? + .to_string() + .into()) + } + + fn trash_media_files(&mut self, input: TrashMediaFilesRequest) -> error::Result<()> { + self.media()?.remove_files(&input.fnames) + } + + fn empty_trash(&mut self) -> error::Result<()> { + self.media_checker()?.empty_trash() + } + + fn restore_trash(&mut self) -> error::Result<()> { + self.media_checker()?.restore_trash() + } +} diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs index 40fdabf68..f26089a93 100644 --- a/rslib/src/notes/mod.rs +++ b/rslib/src/notes/mod.rs @@ -1,6 +1,7 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +pub(crate) mod service; pub(crate) mod undo; use std::borrow::Cow; diff --git a/rslib/src/notes/service.rs b/rslib/src/notes/service.rs new file mode 100644 index 000000000..47ecb9711 --- /dev/null +++ b/rslib/src/notes/service.rs @@ -0,0 +1,188 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; 
http://www.gnu.org/licenses/agpl.html +use std::collections::HashSet; + +use crate::cloze::add_cloze_numbers_in_string; +use crate::collection::Collection; +use crate::decks::DeckId; +use crate::error; +use crate::error::OrInvalid; +use crate::error::OrNotFound; +use crate::notes::Note; +use crate::notes::NoteId; +use crate::prelude::IntoNewtypeVec; + +pub(crate) fn to_i64s(ids: Vec) -> Vec { + ids.into_iter().map(Into::into).collect() +} + +impl crate::services::NotesService for Collection { + fn new_note( + &mut self, + input: anki_proto::notetypes::NotetypeId, + ) -> error::Result { + let ntid = input.into(); + + let nt = self.get_notetype(ntid)?.or_not_found(ntid)?; + Ok(nt.new_note().into()) + } + + fn add_note( + &mut self, + input: anki_proto::notes::AddNoteRequest, + ) -> error::Result { + let mut note: Note = input.note.or_invalid("no note provided")?.into(); + let changes = self.add_note(&mut note, DeckId(input.deck_id))?; + Ok(anki_proto::notes::AddNoteResponse { + note_id: note.id.0, + changes: Some(changes.into()), + }) + } + + fn defaults_for_adding( + &mut self, + input: anki_proto::notes::DefaultsForAddingRequest, + ) -> error::Result { + let home_deck: DeckId = input.home_deck_of_current_review_card.into(); + self.defaults_for_adding(home_deck).map(Into::into) + } + + fn default_deck_for_notetype( + &mut self, + input: anki_proto::notetypes::NotetypeId, + ) -> error::Result { + Ok(self + .default_deck_for_notetype(input.into())? + .unwrap_or(DeckId(0)) + .into()) + } + + fn update_notes( + &mut self, + input: anki_proto::notes::UpdateNotesRequest, + ) -> error::Result { + let notes = input + .notes + .into_iter() + .map(Into::into) + .collect::>(); + self.update_notes_maybe_undoable(notes, !input.skip_undo_entry) + .map(Into::into) + } + + fn get_note( + &mut self, + input: anki_proto::notes::NoteId, + ) -> error::Result { + let nid = input.into(); + self.storage + .get_note(nid)? 
+ .or_not_found(nid) + .map(Into::into) + } + + fn remove_notes( + &mut self, + input: anki_proto::notes::RemoveNotesRequest, + ) -> error::Result { + if !input.note_ids.is_empty() { + self.remove_notes( + &input + .note_ids + .into_iter() + .map(Into::into) + .collect::>(), + ) + } else { + let nids = self.storage.note_ids_of_cards( + &input + .card_ids + .into_iter() + .map(Into::into) + .collect::>(), + )?; + self.remove_notes(&nids.into_iter().collect::>()) + } + .map(Into::into) + } + + fn cloze_numbers_in_note( + &mut self, + note: anki_proto::notes::Note, + ) -> error::Result { + let mut set = HashSet::with_capacity(4); + for field in ¬e.fields { + add_cloze_numbers_in_string(field, &mut set); + } + Ok(anki_proto::notes::ClozeNumbersInNoteResponse { + numbers: set.into_iter().map(|n| n as u32).collect(), + }) + } + + fn after_note_updates( + &mut self, + input: anki_proto::notes::AfterNoteUpdatesRequest, + ) -> error::Result { + self.after_note_updates( + &to_note_ids(input.nids), + input.generate_cards, + input.mark_notes_modified, + ) + .map(Into::into) + } + + fn field_names_for_notes( + &mut self, + input: anki_proto::notes::FieldNamesForNotesRequest, + ) -> error::Result { + let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect(); + self.storage + .field_names_for_notes(&nids) + .map(|fields| anki_proto::notes::FieldNamesForNotesResponse { fields }) + } + + fn note_fields_check( + &mut self, + input: anki_proto::notes::Note, + ) -> error::Result { + let note: Note = input.into(); + + self.note_fields_check(¬e) + .map(|r| anki_proto::notes::NoteFieldsCheckResponse { state: r as i32 }) + } + + fn cards_of_note( + &mut self, + input: anki_proto::notes::NoteId, + ) -> error::Result { + self.storage + .all_card_ids_of_note_in_template_order(NoteId(input.nid)) + .map(|v| anki_proto::cards::CardIds { + cids: v.into_iter().map(Into::into).collect(), + }) + } + + fn get_single_notetype_of_notes( + &mut self, + input: anki_proto::notes::NoteIds, + ) -> 
error::Result { + self.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId)) + .map(Into::into) + } +} + +pub(crate) fn to_note_ids(ids: Vec) -> Vec { + ids.into_iter().map(NoteId).collect() +} + +impl From for NoteId { + fn from(nid: anki_proto::notes::NoteId) -> Self { + NoteId(nid.nid) + } +} + +impl From for anki_proto::notes::NoteId { + fn from(nid: NoteId) -> Self { + anki_proto::notes::NoteId { nid: nid.0 } + } +} diff --git a/rslib/src/notetype/mod.rs b/rslib/src/notetype/mod.rs index b11dd8485..118f08aef 100644 --- a/rslib/src/notetype/mod.rs +++ b/rslib/src/notetype/mod.rs @@ -10,6 +10,7 @@ mod render; mod restore; mod schema11; mod schemachange; +mod service; pub(crate) mod stock; mod templates; pub(crate) mod undo; diff --git a/rslib/src/backend/notetypes.rs b/rslib/src/notetype/service.rs similarity index 53% rename from rslib/src/backend/notetypes.rs rename to rslib/src/notetype/service.rs index 0898d8253..98ab93b75 100644 --- a/rslib/src/backend/notetypes.rs +++ b/rslib/src/notetype/service.rs @@ -1,93 +1,88 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - use anki_proto::generic; -pub(super) use anki_proto::notetypes::notetypes_service::Service as NotetypesService; +use anki_proto::notetypes::stock_notetype::Kind as StockKind; -use super::Backend; +use crate::collection::Collection; use crate::config::get_aux_notetype_config_key; +use crate::error; +use crate::error::OrInvalid; +use crate::error::OrNotFound; +use crate::notes::NoteId; use crate::notetype::stock::get_stock_notetype; -use crate::notetype::stock::StockKind; use crate::notetype::ChangeNotetypeInput; use crate::notetype::Notetype; use crate::notetype::NotetypeChangeInfo; +use crate::notetype::NotetypeId; use crate::notetype::NotetypeSchema11; -use crate::prelude::*; - -impl NotetypesService for Backend { - type Error = AnkiError; +use crate::prelude::IntoNewtypeVec; +impl 
crate::services::NotetypesService for Collection { fn add_notetype( - &self, + &mut self, input: anki_proto::notetypes::Notetype, - ) -> Result { + ) -> error::Result { let mut notetype: Notetype = input.into(); - self.with_col(|col| { - Ok(col - .add_notetype(&mut notetype, false)? - .map(|_| notetype.id.0) - .into()) - }) + + Ok(self + .add_notetype(&mut notetype, false)? + .map(|_| notetype.id.0) + .into()) } fn update_notetype( - &self, + &mut self, input: anki_proto::notetypes::Notetype, - ) -> Result { + ) -> error::Result { let mut notetype: Notetype = input.into(); - self.with_col(|col| col.update_notetype(&mut notetype, false)) - .map(Into::into) + self.update_notetype(&mut notetype, false).map(Into::into) } fn add_notetype_legacy( - &self, + &mut self, input: generic::Json, - ) -> Result { + ) -> error::Result { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); - self.with_col(|col| { - Ok(col - .add_notetype(&mut notetype, false)? - .map(|_| notetype.id.0) - .into()) - }) + + Ok(self + .add_notetype(&mut notetype, false)? 
+ .map(|_| notetype.id.0) + .into()) } fn update_notetype_legacy( - &self, + &mut self, input: generic::Json, - ) -> Result { + ) -> error::Result { let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; let mut notetype: Notetype = legacy.into(); - self.with_col(|col| col.update_notetype(&mut notetype, false)) - .map(Into::into) + self.update_notetype(&mut notetype, false).map(Into::into) } fn add_or_update_notetype( - &self, + &mut self, input: anki_proto::notetypes::AddOrUpdateNotetypeRequest, - ) -> Result { - self.with_col(|col| { - let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; - let mut nt: Notetype = legacy.into(); - if !input.preserve_usn_and_mtime { - nt.set_modified(col.usn()?); - } - if nt.id.0 == 0 { - col.add_notetype(&mut nt, input.skip_checks)?; - } else if !input.preserve_usn_and_mtime { - col.update_notetype(&mut nt, input.skip_checks)?; - } else { - col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?; - } - Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 }) - }) + ) -> error::Result { + let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?; + let mut nt: Notetype = legacy.into(); + if !input.preserve_usn_and_mtime { + nt.set_modified(self.usn()?); + } + if nt.id.0 == 0 { + self.add_notetype(&mut nt, input.skip_checks)?; + } else if !input.preserve_usn_and_mtime { + self.update_notetype(&mut nt, input.skip_checks)?; + } else { + self.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?; + } + Ok(anki_proto::notetypes::NotetypeId { ntid: nt.id.0 }) } fn get_stock_notetype_legacy( - &self, + &mut self, input: anki_proto::notetypes::StockNotetype, - ) -> Result { + ) -> error::Result { let nt = get_stock_notetype(input.kind(), &self.tr); let schema11: NotetypeSchema11 = nt.into(); serde_json::to_vec(&schema11) @@ -96,144 +91,125 @@ impl NotetypesService for Backend { } fn get_notetype( - &self, + &mut self, input: anki_proto::notetypes::NotetypeId, - ) -> 
Result { + ) -> error::Result { let ntid = input.into(); - self.with_col(|col| { - col.storage - .get_notetype(ntid)? - .or_not_found(ntid) - .map(Into::into) - }) - } - fn get_notetype_legacy( - &self, - input: anki_proto::notetypes::NotetypeId, - ) -> Result { - let ntid = input.into(); - self.with_col(|col| { - let schema11: NotetypeSchema11 = - col.storage.get_notetype(ntid)?.or_not_found(ntid)?.into(); - Ok(serde_json::to_vec(&schema11)?).map(Into::into) - }) - } - - fn get_notetype_names( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - let entries: Vec<_> = col - .storage - .get_all_notetype_names()? - .into_iter() - .map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name }) - .collect(); - Ok(anki_proto::notetypes::NotetypeNames { entries }) - }) - } - - fn get_notetype_names_and_counts( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| { - let entries: Vec<_> = col - .storage - .get_notetype_use_counts()? - .into_iter() - .map( - |(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount { - id: id.0, - name, - use_count, - }, - ) - .collect(); - Ok(anki_proto::notetypes::NotetypeUseCounts { entries }) - }) - } - - fn get_notetype_id_by_name( - &self, - input: generic::String, - ) -> Result { - self.with_col(|col| { - col.storage - .get_notetype_id(&input.val) - .and_then(|nt| nt.or_not_found(input.val)) - .map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 }) - }) - } - - fn remove_notetype( - &self, - input: anki_proto::notetypes::NotetypeId, - ) -> Result { - self.with_col(|col| col.remove_notetype(input.into())) + self.storage + .get_notetype(ntid)? 
+ .or_not_found(ntid) .map(Into::into) } + fn get_notetype_legacy( + &mut self, + input: anki_proto::notetypes::NotetypeId, + ) -> error::Result { + let ntid = input.into(); + + let schema11: NotetypeSchema11 = + self.storage.get_notetype(ntid)?.or_not_found(ntid)?.into(); + Ok(serde_json::to_vec(&schema11)?).map(Into::into) + } + + fn get_notetype_names(&mut self) -> error::Result { + let entries: Vec<_> = self + .storage + .get_all_notetype_names()? + .into_iter() + .map(|(id, name)| anki_proto::notetypes::NotetypeNameId { id: id.0, name }) + .collect(); + Ok(anki_proto::notetypes::NotetypeNames { entries }) + } + + fn get_notetype_names_and_counts( + &mut self, + ) -> error::Result { + let entries: Vec<_> = self + .storage + .get_notetype_use_counts()? + .into_iter() + .map( + |(id, name, use_count)| anki_proto::notetypes::NotetypeNameIdUseCount { + id: id.0, + name, + use_count, + }, + ) + .collect(); + Ok(anki_proto::notetypes::NotetypeUseCounts { entries }) + } + + fn get_notetype_id_by_name( + &mut self, + input: generic::String, + ) -> error::Result { + self.storage + .get_notetype_id(&input.val) + .and_then(|nt| nt.or_not_found(input.val)) + .map(|ntid| anki_proto::notetypes::NotetypeId { ntid: ntid.0 }) + } + + fn remove_notetype( + &mut self, + input: anki_proto::notetypes::NotetypeId, + ) -> error::Result { + self.remove_notetype(input.into()).map(Into::into) + } + fn get_aux_notetype_config_key( - &self, + &mut self, input: anki_proto::notetypes::GetAuxConfigKeyRequest, - ) -> Result { + ) -> error::Result { Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into()) } fn get_aux_template_config_key( - &self, + &mut self, input: anki_proto::notetypes::GetAuxTemplateConfigKeyRequest, - ) -> Result { - self.with_col(|col| { - col.get_aux_template_config_key( - input.notetype_id.into(), - input.card_ordinal as usize, - &input.key, - ) - .map(Into::into) - }) + ) -> error::Result { + self.get_aux_template_config_key( + input.notetype_id.into(), + 
input.card_ordinal as usize, + &input.key, + ) + .map(Into::into) } fn get_change_notetype_info( - &self, + &mut self, input: anki_proto::notetypes::GetChangeNotetypeInfoRequest, - ) -> Result { - self.with_col(|col| { - col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into()) - .map(Into::into) - }) + ) -> error::Result { + self.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into()) + .map(Into::into) } fn change_notetype( - &self, + &mut self, input: anki_proto::notetypes::ChangeNotetypeRequest, - ) -> Result { - self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into)) + ) -> error::Result { + self.change_notetype_of_notes(input.into()).map(Into::into) } fn get_field_names( - &self, + &mut self, input: anki_proto::notetypes::NotetypeId, - ) -> Result { - self.with_col(|col| col.storage.get_field_names(input.into())) - .map(Into::into) + ) -> error::Result { + self.storage.get_field_names(input.into()).map(Into::into) } fn restore_notetype_to_stock( - &self, + &mut self, input: anki_proto::notetypes::RestoreNotetypeToStockRequest, - ) -> Result { + ) -> error::Result { let force_kind = input.force_kind.and_then(StockKind::from_i32); - self.with_col(|col| { - col.restore_notetype_to_stock( - input.notetype_id.or_invalid("missing notetype id")?.into(), - force_kind, - ) - .map(Into::into) - }) + + self.restore_notetype_to_stock( + input.notetype_id.or_invalid("missing notetype id")?.into(), + force_kind, + ) + .map(Into::into) } } diff --git a/rslib/src/scheduler/mod.rs b/rslib/src/scheduler/mod.rs index cd50ec55f..e24053c8d 100644 --- a/rslib/src/scheduler/mod.rs +++ b/rslib/src/scheduler/mod.rs @@ -14,6 +14,7 @@ mod learning; pub mod new; pub(crate) mod queue; mod reviews; +mod service; pub mod states; pub mod timespan; pub mod timing; diff --git a/rslib/src/backend/scheduler/answering.rs b/rslib/src/scheduler/service/answering.rs similarity index 100% rename from 
rslib/src/backend/scheduler/answering.rs rename to rslib/src/scheduler/service/answering.rs diff --git a/rslib/src/scheduler/service/mod.rs b/rslib/src/scheduler/service/mod.rs new file mode 100644 index 000000000..b59b42df4 --- /dev/null +++ b/rslib/src/scheduler/service/mod.rs @@ -0,0 +1,250 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +mod answering; +mod states; + +use anki_proto::generic; +use anki_proto::scheduler; +use anki_proto::scheduler::SchedulingStatesWithContext; +use anki_proto::scheduler::SetSchedulingStatesRequest; + +use crate::prelude::*; +use crate::scheduler::new::NewCardDueOrder; +use crate::scheduler::states::CardState; +use crate::scheduler::states::SchedulingStates; +use crate::stats::studied_today; + +impl crate::services::SchedulerService for Collection { + /// This behaves like _updateCutoff() in older code - it also unburies at + /// the start of a new day. + fn sched_timing_today(&mut self) -> Result { + let timing = self.timing_today()?; + self.unbury_if_day_rolled_over(timing)?; + Ok(timing.into()) + } + + /// Fetch data from DB and return rendered string. + fn studied_today(&mut self) -> Result { + self.studied_today().map(Into::into) + } + + /// Message rendering only, for old graphs. 
+ fn studied_today_message( + &mut self, + input: scheduler::StudiedTodayMessageRequest, + ) -> Result { + Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into()) + } + + fn update_stats(&mut self, input: scheduler::UpdateStatsRequest) -> Result<()> { + self.transact_no_undo(|col| { + let today = col.current_due_day(0)?; + let usn = col.usn()?; + col.update_deck_stats(today, usn, input).map(Into::into) + }) + } + + fn extend_limits(&mut self, input: scheduler::ExtendLimitsRequest) -> Result<()> { + self.transact_no_undo(|col| { + let today = col.current_due_day(0)?; + let usn = col.usn()?; + col.extend_limits( + today, + usn, + input.deck_id.into(), + input.new_delta, + input.review_delta, + ) + .map(Into::into) + }) + } + + fn counts_for_deck_today( + &mut self, + input: anki_proto::decks::DeckId, + ) -> Result { + self.counts_for_deck_today(input.did.into()) + } + + fn congrats_info(&mut self) -> Result { + self.congrats_info() + } + + fn restore_buried_and_suspended_cards( + &mut self, + input: anki_proto::cards::CardIds, + ) -> Result { + let cids: Vec<_> = input.cids.into_iter().map(CardId).collect(); + self.unbury_or_unsuspend_cards(&cids).map(Into::into) + } + + fn unbury_deck( + &mut self, + input: scheduler::UnburyDeckRequest, + ) -> Result { + self.unbury_deck(input.deck_id.into(), input.mode()) + .map(Into::into) + } + + fn bury_or_suspend_cards( + &mut self, + input: scheduler::BuryOrSuspendCardsRequest, + ) -> Result { + let mode = input.mode(); + let cids = if input.card_ids.is_empty() { + self.storage + .card_ids_of_notes(&input.note_ids.into_newtype(NoteId))? 
+ } else { + input.card_ids.into_newtype(CardId) + }; + self.bury_or_suspend_cards(&cids, mode).map(Into::into) + } + + fn empty_filtered_deck( + &mut self, + input: anki_proto::decks::DeckId, + ) -> Result { + self.empty_filtered_deck(input.did.into()).map(Into::into) + } + + fn rebuild_filtered_deck( + &mut self, + input: anki_proto::decks::DeckId, + ) -> Result { + self.rebuild_filtered_deck(input.did.into()).map(Into::into) + } + + fn schedule_cards_as_new( + &mut self, + input: scheduler::ScheduleCardsAsNewRequest, + ) -> Result { + let cids = input.card_ids.into_newtype(CardId); + self.reschedule_cards_as_new( + &cids, + input.log, + input.restore_position, + input.reset_counts, + input + .context + .and_then(scheduler::schedule_cards_as_new_request::Context::from_i32), + ) + .map(Into::into) + } + + fn schedule_cards_as_new_defaults( + &mut self, + input: scheduler::ScheduleCardsAsNewDefaultsRequest, + ) -> Result { + Ok(Collection::reschedule_cards_as_new_defaults( + self, + input.context(), + )) + } + + fn set_due_date( + &mut self, + input: scheduler::SetDueDateRequest, + ) -> Result { + let config = input.config_key.map(|v| v.key().into()); + let days = input.days; + let cids = input.card_ids.into_newtype(CardId); + self.set_due_date(&cids, &days, config).map(Into::into) + } + + fn sort_cards( + &mut self, + input: scheduler::SortCardsRequest, + ) -> Result { + let cids = input.card_ids.into_newtype(CardId); + let (start, step, random, shift) = ( + input.starting_from, + input.step_size, + input.randomize, + input.shift_existing, + ); + let order = if random { + NewCardDueOrder::Random + } else { + NewCardDueOrder::Preserve + }; + + self.sort_cards(&cids, start, step, order, shift) + .map(Into::into) + } + + fn reposition_defaults(&mut self) -> Result { + Ok(Collection::reposition_defaults(self)) + } + + fn sort_deck( + &mut self, + input: scheduler::SortDeckRequest, + ) -> Result { + self.sort_deck_legacy(input.deck_id.into(), input.randomize) + 
.map(Into::into) + } + + fn get_scheduling_states( + &mut self, + input: anki_proto::cards::CardId, + ) -> Result { + let cid: CardId = input.into(); + self.get_scheduling_states(cid).map(Into::into) + } + + fn describe_next_states( + &mut self, + input: scheduler::SchedulingStates, + ) -> Result { + let states: SchedulingStates = input.into(); + self.describe_next_states(states).map(Into::into) + } + + fn state_is_leech(&mut self, input: scheduler::SchedulingState) -> Result { + let state: CardState = input.into(); + Ok(state.leeched().into()) + } + + fn answer_card( + &mut self, + input: scheduler::CardAnswer, + ) -> Result { + self.answer_card(&mut input.into()).map(Into::into) + } + + fn upgrade_scheduler(&mut self) -> Result<()> { + self.transact_no_undo(|col| col.upgrade_to_v2_scheduler()) + .map(Into::into) + } + + fn get_queued_cards( + &mut self, + input: scheduler::GetQueuedCardsRequest, + ) -> Result { + self.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only) + .map(Into::into) + } + + fn custom_study( + &mut self, + input: scheduler::CustomStudyRequest, + ) -> Result { + self.custom_study(input).map(Into::into) + } + + fn custom_study_defaults( + &mut self, + input: scheduler::CustomStudyDefaultsRequest, + ) -> Result { + self.custom_study_defaults(input.deck_id.into()) + } + + fn get_scheduling_states_with_context(&mut self) -> Result { + invalid_input!("the frontend should implement this") + } + + fn set_scheduling_states(&mut self, _input: SetSchedulingStatesRequest) -> Result<()> { + invalid_input!("the frontend should implement this") + } +} diff --git a/rslib/src/backend/scheduler/states/filtered.rs b/rslib/src/scheduler/service/states/filtered.rs similarity index 100% rename from rslib/src/backend/scheduler/states/filtered.rs rename to rslib/src/scheduler/service/states/filtered.rs diff --git a/rslib/src/backend/scheduler/states/learning.rs b/rslib/src/scheduler/service/states/learning.rs similarity index 100% rename from 
rslib/src/backend/scheduler/states/learning.rs rename to rslib/src/scheduler/service/states/learning.rs diff --git a/rslib/src/backend/scheduler/states/mod.rs b/rslib/src/scheduler/service/states/mod.rs similarity index 100% rename from rslib/src/backend/scheduler/states/mod.rs rename to rslib/src/scheduler/service/states/mod.rs diff --git a/rslib/src/backend/scheduler/states/new.rs b/rslib/src/scheduler/service/states/new.rs similarity index 100% rename from rslib/src/backend/scheduler/states/new.rs rename to rslib/src/scheduler/service/states/new.rs diff --git a/rslib/src/backend/scheduler/states/normal.rs b/rslib/src/scheduler/service/states/normal.rs similarity index 100% rename from rslib/src/backend/scheduler/states/normal.rs rename to rslib/src/scheduler/service/states/normal.rs diff --git a/rslib/src/backend/scheduler/states/preview.rs b/rslib/src/scheduler/service/states/preview.rs similarity index 100% rename from rslib/src/backend/scheduler/states/preview.rs rename to rslib/src/scheduler/service/states/preview.rs diff --git a/rslib/src/backend/scheduler/states/relearning.rs b/rslib/src/scheduler/service/states/relearning.rs similarity index 100% rename from rslib/src/backend/scheduler/states/relearning.rs rename to rslib/src/scheduler/service/states/relearning.rs diff --git a/rslib/src/backend/scheduler/states/rescheduling.rs b/rslib/src/scheduler/service/states/rescheduling.rs similarity index 100% rename from rslib/src/backend/scheduler/states/rescheduling.rs rename to rslib/src/scheduler/service/states/rescheduling.rs diff --git a/rslib/src/backend/scheduler/states/review.rs b/rslib/src/scheduler/service/states/review.rs similarity index 100% rename from rslib/src/backend/scheduler/states/review.rs rename to rslib/src/scheduler/service/states/review.rs diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs index 3217c6363..bc1fd6fd2 100644 --- a/rslib/src/search/mod.rs +++ b/rslib/src/search/mod.rs @@ -3,6 +3,7 @@ mod builder; mod parser; +mod 
service; mod sqlwriter; pub(crate) mod writer; diff --git a/rslib/src/backend/search/browser_table.rs b/rslib/src/search/service/browser_table.rs similarity index 100% rename from rslib/src/backend/search/browser_table.rs rename to rslib/src/search/service/browser_table.rs diff --git a/rslib/src/backend/search/mod.rs b/rslib/src/search/service/mod.rs similarity index 65% rename from rslib/src/backend/search/mod.rs rename to rslib/src/search/service/mod.rs index 916dae432..6d95eb681 100644 --- a/rslib/src/backend/search/mod.rs +++ b/rslib/src/search/service/mod.rs @@ -8,24 +8,20 @@ use std::str::FromStr; use std::sync::Arc; use anki_proto::generic; -pub(super) use anki_proto::search::search_service::Service as SearchService; use anki_proto::search::sort_order::Value as SortOrderProto; -use super::notes::to_note_ids; -use super::Backend; -use crate::backend::search::browser_table::string_list_to_browser_columns; use crate::browser_table::Column; +use crate::notes::service::to_note_ids; use crate::prelude::*; use crate::search::replace_search_node; +use crate::search::service::browser_table::string_list_to_browser_columns; use crate::search::JoinSearches; use crate::search::Node; use crate::search::SortMode; -impl SearchService for Backend { - type Error = AnkiError; - +impl crate::services::SearchService for Collection { fn build_search_string( - &self, + &mut self, input: anki_proto::search::SearchNode, ) -> Result { let node: Node = input.try_into()?; @@ -33,33 +29,29 @@ impl SearchService for Backend { } fn search_cards( - &self, + &mut self, input: anki_proto::search::SearchRequest, ) -> Result { - self.with_col(|col| { - let order = input.order.unwrap_or_default().value.into(); - let cids = col.search_cards(&input.search, order)?; - Ok(anki_proto::search::SearchResponse { - ids: cids.into_iter().map(|v| v.0).collect(), - }) + let order = input.order.unwrap_or_default().value.into(); + let cids = self.search_cards(&input.search, order)?; + 
Ok(anki_proto::search::SearchResponse { + ids: cids.into_iter().map(|v| v.0).collect(), }) } fn search_notes( - &self, + &mut self, input: anki_proto::search::SearchRequest, ) -> Result { - self.with_col(|col| { - let order = input.order.unwrap_or_default().value.into(); - let nids = col.search_notes(&input.search, order)?; - Ok(anki_proto::search::SearchResponse { - ids: nids.into_iter().map(|v| v.0).collect(), - }) + let order = input.order.unwrap_or_default().value.into(); + let nids = self.search_notes(&input.search, order)?; + Ok(anki_proto::search::SearchResponse { + ids: nids.into_iter().map(|v| v.0).collect(), }) } fn join_search_nodes( - &self, + &mut self, input: anki_proto::search::JoinSearchNodesRequest, ) -> Result { let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?; @@ -82,7 +74,7 @@ impl SearchService for Backend { } fn replace_search_node( - &self, + &mut self, input: anki_proto::search::ReplaceSearchNodeRequest, ) -> Result { let existing = { @@ -98,7 +90,7 @@ impl SearchService for Backend { } fn find_and_replace( - &self, + &mut self, input: anki_proto::search::FindAndReplaceRequest, ) -> Result { let mut search = if input.regex { @@ -116,33 +108,28 @@ impl SearchService for Backend { Some(input.field_name) }; let repl = input.replacement; - self.with_col(|col| { - if nids.is_empty() { - nids = col.search_notes_unordered("")? - }; - col.find_and_replace(nids, &search, &repl, field_name) - .map(Into::into) - }) + + if nids.is_empty() { + nids = self.search_notes_unordered("")? 
+ }; + self.find_and_replace(nids, &search, &repl, field_name) + .map(Into::into) } - fn all_browser_columns( - &self, - _input: generic::Empty, - ) -> Result { - self.with_col(|col| Ok(col.all_browser_columns())) + fn all_browser_columns(&mut self) -> Result { + Ok(Collection::all_browser_columns(self)) } - fn set_active_browser_columns(&self, input: generic::StringList) -> Result { - self.with_col(|col| { - col.state.active_browser_columns = - Some(Arc::new(string_list_to_browser_columns(input))); - Ok(()) - }) - .map(Into::into) + fn set_active_browser_columns(&mut self, input: generic::StringList) -> Result<()> { + self.state.active_browser_columns = Some(Arc::new(string_list_to_browser_columns(input))); + Ok(()).map(Into::into) } - fn browser_row_for_id(&self, input: generic::Int64) -> Result { - self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into)) + fn browser_row_for_id( + &mut self, + input: generic::Int64, + ) -> Result { + self.browser_row_for_id(input.val).map(Into::into) } } diff --git a/rslib/src/backend/search/search_node.rs b/rslib/src/search/service/search_node.rs similarity index 100% rename from rslib/src/backend/search/search_node.rs rename to rslib/src/search/service/search_node.rs diff --git a/rslib/src/services.rs b/rslib/src/services.rs new file mode 100644 index 000000000..536507436 --- /dev/null +++ b/rslib/src/services.rs @@ -0,0 +1,6 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +// Includes the automatically-generated *Service and Backend*Service traits, +// and some impls on Backend and Collection. 
+include!(concat!(env!("OUT_DIR"), "/backend.rs")); diff --git a/rslib/src/stats/mod.rs b/rslib/src/stats/mod.rs index b855d8ce5..eee4ab335 100644 --- a/rslib/src/stats/mod.rs +++ b/rslib/src/stats/mod.rs @@ -3,6 +3,7 @@ mod card; mod graphs; +mod service; mod today; pub use today::studied_today; diff --git a/rslib/src/backend/stats.rs b/rslib/src/stats/service.rs similarity index 55% rename from rslib/src/backend/stats.rs rename to rslib/src/stats/service.rs index be1de00b7..e7b208724 100644 --- a/rslib/src/backend/stats.rs +++ b/rslib/src/stats/service.rs @@ -1,42 +1,33 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub(super) use anki_proto::stats::stats_service::Service as StatsService; - -use super::Backend; -use crate::prelude::*; +use crate::collection::Collection; +use crate::error; use crate::revlog::RevlogReviewKind; -impl StatsService for Backend { - type Error = AnkiError; - +impl crate::services::StatsService for Collection { fn card_stats( - &self, + &mut self, input: anki_proto::cards::CardId, - ) -> Result { - self.with_col(|col| col.card_stats(input.cid.into())) + ) -> error::Result { + self.card_stats(input.cid.into()) } fn graphs( - &self, + &mut self, input: anki_proto::stats::GraphsRequest, - ) -> Result { - self.with_col(|col| col.graph_data_for_search(&input.search, input.days)) + ) -> error::Result { + self.graph_data_for_search(&input.search, input.days) } - fn get_graph_preferences( - &self, - _input: anki_proto::generic::Empty, - ) -> Result { - self.with_col(|col| Ok(col.get_graph_preferences())) + fn get_graph_preferences(&mut self) -> error::Result { + Ok(Collection::get_graph_preferences(self)) } fn set_graph_preferences( - &self, + &mut self, input: anki_proto::stats::GraphPreferences, - ) -> Result { - self.with_col(|col| col.set_graph_preferences(input)) - .map(Into::into) + ) -> error::Result<()> { + 
self.set_graph_preferences(input).map(Into::into) } } diff --git a/rslib/src/tags/mod.rs b/rslib/src/tags/mod.rs index 04f876ad3..c05dacab5 100644 --- a/rslib/src/tags/mod.rs +++ b/rslib/src/tags/mod.rs @@ -10,6 +10,7 @@ mod register; mod remove; mod rename; mod reparent; +mod service; mod tree; pub(crate) mod undo; diff --git a/rslib/src/tags/service.rs b/rslib/src/tags/service.rs new file mode 100644 index 000000000..7f6974fd4 --- /dev/null +++ b/rslib/src/tags/service.rs @@ -0,0 +1,107 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::generic; + +use crate::collection::Collection; +use crate::error; +use crate::notes::service::to_note_ids; + +impl crate::services::TagsService for Collection { + fn clear_unused_tags(&mut self) -> error::Result { + self.clear_unused_tags().map(Into::into) + } + + fn all_tags(&mut self) -> error::Result { + Ok(generic::StringList { + vals: self + .storage + .all_tags()? 
+ .into_iter() + .map(|t| t.name) + .collect(), + }) + } + + fn remove_tags( + &mut self, + tags: generic::String, + ) -> error::Result { + self.remove_tags(tags.val.as_str()).map(Into::into) + } + + fn set_tag_collapsed( + &mut self, + input: anki_proto::tags::SetTagCollapsedRequest, + ) -> error::Result { + self.set_tag_collapsed(&input.name, input.collapsed) + .map(Into::into) + } + + fn tag_tree(&mut self) -> error::Result { + self.tag_tree() + } + + fn reparent_tags( + &mut self, + input: anki_proto::tags::ReparentTagsRequest, + ) -> error::Result { + let source_tags = input.tags; + let target_tag = if input.new_parent.is_empty() { + None + } else { + Some(input.new_parent) + }; + self.reparent_tags(&source_tags, target_tag).map(Into::into) + } + + fn rename_tags( + &mut self, + input: anki_proto::tags::RenameTagsRequest, + ) -> error::Result { + self.rename_tag(&input.current_prefix, &input.new_prefix) + .map(Into::into) + } + + fn add_note_tags( + &mut self, + input: anki_proto::tags::NoteIdsAndTagsRequest, + ) -> error::Result { + self.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags) + .map(Into::into) + } + + fn remove_note_tags( + &mut self, + input: anki_proto::tags::NoteIdsAndTagsRequest, + ) -> error::Result { + self.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags) + .map(Into::into) + } + + fn find_and_replace_tag( + &mut self, + input: anki_proto::tags::FindAndReplaceTagRequest, + ) -> error::Result { + let note_ids = if input.note_ids.is_empty() { + self.search_notes_unordered("")? + } else { + to_note_ids(input.note_ids) + }; + self.find_and_replace_tag( + ¬e_ids, + &input.search, + &input.replacement, + input.regex, + input.match_case, + ) + .map(Into::into) + } + + fn complete_tag( + &mut self, + input: anki_proto::tags::CompleteTagRequest, + ) -> error::Result { + let tags = Collection::complete_tag(self, &input.input, input.match_limit as usize)?; + Ok(anki_proto::tags::CompleteTagResponse { tags }) + } +}