mirror of
https://github.com/ankitects/anki.git
synced 2025-09-18 14:02:21 -04:00

* Automatically elide empty inputs and outputs to backend methods * Refactor service generation Despite the fact that the majority of our Protobuf service methods require an open collection, they were not accessible with just a Collection object. To access the methods (e.g. because we haven't gotten around to exposing the correct API in Collection yet), you had to wrap the collection in a Backend object, and pay a mutex-acquisition cost for each call, even if you had exclusive access to the object. This commit migrates the majority of service methods to the Collection, so they can now be used directly, and improves the ergonomics a bit at the same time. The approach taken: - The service generation now happens in rslib instead of anki_proto, which avoids the need for trait constraints and associated types. - Service methods are assumed to be collection-based by default. Instead of implementing the service on Backend, we now implement it on Collection, which means our methods no longer need to use self.with_col(...). - We automatically generate methods in Backend which use self.with_col() to delegate to the Collection method. - For methods that are only appropriate for the backend, we add a flag in the .proto file. The codegen uses this flag to write the method into a BackendFooService instead of FooService, which the backend implements. - The flag also allows us to define separate implementations for collection and backend, so we can e.g. skip the collection mutex in the i18n service while also providing the service on a collection.
207 lines
5.4 KiB
Protocol Buffer
207 lines
5.4 KiB
Protocol Buffer
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

syntax = "proto3";

package anki.import_export;

import "anki/cards.proto";
import "anki/codegen.proto";
import "anki/collection.proto";
import "anki/generic.proto";
import "anki/notes.proto";

option java_multiple_files = true;

// Import and export of collection packages, Anki deck packages (.apkg),
// CSV files, and JSON note data.
service ImportExportService {
  // Replace the collection with the contents of a collection package.
  // Backend-only: generated on BackendImportExportService rather than the
  // Collection-based service, per the codegen flag below.
  rpc ImportCollectionPackage(ImportCollectionPackageRequest)
      returns (generic.Empty) {
    option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
  }
  // Write the collection out as a collection package.
  // Backend-only, as above.
  rpc ExportCollectionPackage(ExportCollectionPackageRequest)
      returns (generic.Empty) {
    option (codegen.rust_methods) = RUST_METHODS_BACKEND_ONLY;
  }
  // Import notes/cards from an .apkg file into the open collection.
  rpc ImportAnkiPackage(ImportAnkiPackageRequest) returns (ImportResponse);
  // NOTE(review): return value is presumably a count of exported items —
  // confirm against the Rust implementation.
  rpc ExportAnkiPackage(ExportAnkiPackageRequest) returns (generic.UInt32);
  // Inspect a CSV file and derive import settings (delimiter, columns, ...).
  rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata);
  // Import notes from a CSV file using the provided metadata.
  rpc ImportCsv(ImportCsvRequest) returns (ImportResponse);
  rpc ExportNoteCsv(ExportNoteCsvRequest) returns (generic.UInt32);
  rpc ExportCardCsv(ExportCardCsvRequest) returns (generic.UInt32);
  // Import notes from a JSON file path / a JSON string respectively.
  rpc ImportJsonFile(generic.String) returns (ImportResponse);
  rpc ImportJsonString(generic.String) returns (ImportResponse);
}

message ImportCollectionPackageRequest {
  // Path the imported collection will be written to.
  string col_path = 1;
  // Path of the package/backup file to import from.
  string backup_path = 2;
  // Paths of the target media folder and media database.
  string media_folder = 3;
  string media_db = 4;
}

message ExportCollectionPackageRequest {
  // Path the package will be written to.
  string out_path = 1;
  bool include_media = 2;
  // If true, export in the legacy package format (see PackageMetadata).
  bool legacy = 3;
}

message ImportAnkiPackageRequest {
  // Path of the .apkg file to import.
  string package_path = 1;
}

// Result of a note-importing operation (package/CSV/JSON), including a log
// of how each incoming note was handled.
message ImportResponse {
  message Note {
    notes.NoteId id = 1;
    repeated string fields = 2;
  }
  // Incoming notes, bucketed by how the importer disposed of them.
  message Log {
    repeated Note new = 1;
    repeated Note updated = 2;
    repeated Note duplicate = 3;
    repeated Note conflicting = 4;
    repeated Note first_field_match = 5;
    repeated Note missing_notetype = 6;
    repeated Note missing_deck = 7;
    repeated Note empty_first_field = 8;
    // The duplicate-handling strategy that was in effect for this import.
    CsvMetadata.DupeResolution dupe_resolution = 9;
    // Usually the sum of all queues, but may be lower if multiple duplicates
    // have been updated with the same note.
    uint32 found_notes = 10;
  }
  collection.OpChanges changes = 1;
  Log log = 2;
}

message ExportAnkiPackageRequest {
  // Path the .apkg file will be written to.
  string out_path = 1;
  bool with_scheduling = 2;
  bool with_media = 3;
  // If true, export in the legacy package format (see PackageMetadata).
  bool legacy = 4;
  // Which cards/notes to include; see ExportLimit.
  ExportLimit limit = 5;
}

// Version information stored in a package's `meta` file, used to decide how
// the rest of the archive should be read.
message PackageMetadata {
  enum Version {
    VERSION_UNKNOWN = 0;
    // When `meta` missing, and collection.anki2 file present.
    VERSION_LEGACY_1 = 1;
    // When `meta` missing, and collection.anki21 file present.
    VERSION_LEGACY_2 = 2;
    // Implies MediaEntry media map, and zstd compression.
    // collection.21b file
    VERSION_LATEST = 3;
  }

  Version version = 1;
}

// Media file manifest stored inside a (latest-version) package.
message MediaEntries {
  message MediaEntry {
    // Filename of the media file.
    string name = 1;
    // Size in bytes.
    uint32 size = 2;
    // SHA-1 checksum of the file contents.
    bytes sha1 = 3;

    /// Legacy media maps may include gaps in the media list, so the original
    /// file index is recorded when importing from a HashMap. This field is not
    /// set when exporting.
    optional uint32 legacy_zip_filename = 255;
  }

  repeated MediaEntry entries = 1;
}

message ImportCsvRequest {
  // Path of the CSV file to import.
  string path = 1;
  // Import settings, typically obtained via GetCsvMetadata and then
  // adjusted by the user.
  CsvMetadata metadata = 2;
}

message CsvMetadataRequest {
  // Path of the CSV file to inspect.
  string path = 1;
  // Optional overrides; when unset, values are auto-detected or defaulted
  // by the metadata routine.
  optional CsvMetadata.Delimiter delimiter = 2;
  optional int64 notetype_id = 3;
  optional int64 deck_id = 4;
  optional bool is_html = 5;
}

// Column indices are 1-based to make working with them in TS easier, where
// unset numerical fields default to 0.
message CsvMetadata {
  // How an incoming note that matches an existing one is handled.
  enum DupeResolution {
    UPDATE = 0;
    PRESERVE = 1;
    DUPLICATE = 2;
    // UPDATE_IF_NEWER = 3;
  }
  // Order roughly in ascending expected frequency in note text, because the
  // delimiter detection algorithm is stupidly picking the first one it
  // encounters.
  enum Delimiter {
    TAB = 0;
    PIPE = 1;
    SEMICOLON = 2;
    COLON = 3;
    COMMA = 4;
    SPACE = 5;
  }
  // A notetype with an explicit column-to-field mapping.
  message MappedNotetype {
    int64 id = 1;
    // Source column indices for note fields. One-based. 0 means n/a.
    repeated uint32 field_columns = 2;
  }

  Delimiter delimiter = 1;
  bool is_html = 2;
  // Tags applied to all imported notes / to updated notes respectively.
  repeated string global_tags = 3;
  repeated string updated_tags = 4;
  // Column names as defined by the file or empty strings otherwise. Also used
  // to determine the number of columns.
  repeated string column_labels = 5;
  oneof deck {
    int64 deck_id = 6;
    // One-based. 0 means n/a.
    uint32 deck_column = 7;
  }
  oneof notetype {
    // One notetype for all rows with given column mapping.
    MappedNotetype global_notetype = 8;
    // Row-specific notetypes with automatic mapping by index.
    // One-based. 0 means n/a.
    uint32 notetype_column = 9;
  }
  // Scope considered when searching for duplicates of an incoming note.
  enum MatchScope {
    NOTETYPE = 0;
    NOTETYPE_AND_DECK = 1;
  }
  // One-based. 0 means n/a.
  uint32 tags_column = 10;
  // Force* flags prevent re-detection from overriding a user's choice.
  bool force_delimiter = 11;
  bool force_is_html = 12;
  // Sample rows for previewing the import in the UI.
  repeated generic.StringList preview = 13;
  uint32 guid_column = 14;
  DupeResolution dupe_resolution = 15;
  MatchScope match_scope = 16;
}

message ExportCardCsvRequest {
  // Path the CSV file will be written to.
  string out_path = 1;
  // If false, HTML is presumably stripped from field text — confirm in impl.
  bool with_html = 2;
  // Which cards to include; see ExportLimit.
  ExportLimit limit = 3;
}

message ExportNoteCsvRequest {
  // Path the CSV file will be written to.
  string out_path = 1;
  bool with_html = 2;
  // The with_* flags below select optional columns in the output.
  bool with_tags = 3;
  bool with_deck = 4;
  bool with_notetype = 5;
  bool with_guid = 6;
  // Which notes to include; see ExportLimit.
  ExportLimit limit = 7;
}

// Selects which part of the collection an export covers: everything, a
// single deck, or explicit note/card ids.
message ExportLimit {
  oneof limit {
    generic.Empty whole_collection = 1;
    int64 deck_id = 2;
    notes.NoteIds note_ids = 3;
    cards.CardIds card_ids = 4;
  }
}