Mirror of https://github.com/ankitects/anki.git
in/out -> request/response

The saved characters weren't worth the increased difficulty when reading, or the deviation from protobuf norms.
commit c79f8ba88f
parent 42c71cd204

74 changed files with 452 additions and 425 deletions
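To make the pattern concrete, here is a condensed sketch of the convention the commit adopts, using the search messages that appear in the diff below (fields and surrounding definitions are abbreviated, so treat this as illustrative rather than a complete excerpt of backend.proto):

// Before: terse In/Out suffixes.
//   rpc SearchCards(SearchIn) returns (SearchOut);

// After: the conventional protobuf Request/Response pairing.
message SearchRequest {
  string search = 1;
  SortOrder order = 2;
}

message SearchResponse {
  repeated int64 ids = 1;
}

service SearchService {
  rpc SearchCards(SearchRequest) returns (SearchResponse);
}

The same mechanical rename is applied to every other In/Out message pair below; a few RPC declarations are re-wrapped where the longer names exceed the formatter's line length.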
@@ -6,14 +6,14 @@ persistent = no
 ignored-classes=
   BrowserColumns,
   BrowserRow,
-  FormatTimespanIn,
+  FormatTimespanRequest,
   CardAnswer,
   QueuedCards,
-  UnburyDeckIn,
+  UnburyDeckRequest,
-  BuryOrSuspendCardsIn,
+  BuryOrSuspendCardsRequest,
-  NoteFieldsCheckOut,
+  NoteFieldsCheckResponse,
   BackendError,
-  SetDeckCollapsedIn,
+  SetDeckCollapsedRequest,

 [REPORTS]
 output-format=colorized
@@ -132,14 +132,14 @@ class RustBackend(RustBackendGenerated):

 def translate_string_in(
     module_index: int, message_index: int, **kwargs: Union[str, int, float]
-) -> pb.TranslateStringIn:
+) -> pb.TranslateStringRequest:
     args = {}
     for (k, v) in kwargs.items():
         if isinstance(v, str):
             args[k] = pb.TranslateArgValue(str=v)
         else:
             args[k] = pb.TranslateArgValue(number=v)
-    return pb.TranslateStringIn(
+    return pb.TranslateStringRequest(
         module_index=module_index, message_index=message_index, args=args
     )

@@ -107,7 +107,7 @@ def get_input_assign(msg):
 def render_method(service_idx, method_idx, method):
     input_name = method.input_type.name
     if (
-        (input_name.endswith("In") or len(method.input_type.fields) < 2)
+        (input_name.endswith("Request") or len(method.input_type.fields) < 2)
         and not method.input_type.oneofs
         and not method.name in SKIP_UNROLL_INPUT
     ):
@@ -21,9 +21,9 @@ from anki.utils import from_json_bytes, ids2str, intTime, legacy_func, to_json_b
 DeckTreeNode = _pb.DeckTreeNode
 DeckNameId = _pb.DeckNameId
 FilteredDeckConfig = _pb.Deck.Filtered
-DeckCollapseScope = _pb.SetDeckCollapsedIn.Scope
+DeckCollapseScope = _pb.SetDeckCollapsedRequest.Scope
 DeckConfigsForUpdate = _pb.DeckConfigsForUpdate
-UpdateDeckConfigs = _pb.UpdateDeckConfigsIn
+UpdateDeckConfigs = _pb.UpdateDeckConfigsRequest

 # legacy code may pass this in as the type argument to .id()
 defaultDeck = 0
@@ -13,7 +13,7 @@ import anki._backend.backend_pb2 as _pb

 # public exports
 TR = anki._backend.LegacyTranslationEnum
-FormatTimeSpan = _pb.FormatTimespanIn
+FormatTimeSpan = _pb.FormatTimespanRequest


 langs = sorted(
@@ -45,7 +45,7 @@ class ExtractedLatexOutput:
     latex: List[ExtractedLatex]

     @staticmethod
-    def from_proto(proto: _pb.ExtractLatexOut) -> ExtractedLatexOutput:
+    def from_proto(proto: _pb.ExtractLatexResponse) -> ExtractedLatexOutput:
         return ExtractedLatexOutput(
             html=proto.text,
             latex=[
@@ -29,7 +29,7 @@ def media_paths_from_col_path(col_path: str) -> Tuple[str, str]:
     return (media_folder, media_db)


-CheckMediaOut = _pb.CheckMediaOut
+CheckMediaResponse = _pb.CheckMediaResponse


 # fixme: look into whether we can drop chdir() below
@@ -212,7 +212,7 @@ class MediaManager:
     # Checking media
     ##########################################################################

-    def check(self) -> CheckMediaOut:
+    def check(self) -> CheckMediaResponse:
         output = self.col._backend.check_media()
         # files may have been renamed on disk, so an undo at this point could
         # break file references
@@ -24,7 +24,7 @@ NotetypeNameId = _pb.NotetypeNameId
 NotetypeNameIdUseCount = _pb.NotetypeNameIdUseCount
 NotetypeNames = _pb.NotetypeNames
 ChangeNotetypeInfo = _pb.ChangeNotetypeInfo
-ChangeNotetypeIn = _pb.ChangeNotetypeIn
+ChangeNotetypeRequest = _pb.ChangeNotetypeRequest

 # legacy types
 NotetypeDict = Dict[str, Any]
@@ -447,12 +447,12 @@ and notes.mid = ? and cards.ord = ?""",
             old_notetype_id=old_notetype_id, new_notetype_id=new_notetype_id
         )

-    def change_notetype_of_notes(self, input: ChangeNotetypeIn) -> OpChanges:
+    def change_notetype_of_notes(self, input: ChangeNotetypeRequest) -> OpChanges:
         """Assign a new notetype, optionally altering field/template order.

         To get defaults, use

-        input = ChangeNotetypeIn()
+        input = ChangeNotetypeRequest()
         input.ParseFromString(col.models.change_notetype_info(...))
         input.note_ids.extend([...])

@@ -482,7 +482,7 @@ and notes.mid = ? and cards.ord = ?""",
         template_map = self._convert_legacy_map(cmap, len(newModel["tmpls"]))

         self.col._backend.change_notetype(
-            ChangeNotetypeIn(
+            ChangeNotetypeRequest(
                 note_ids=nids,
                 new_fields=field_map,
                 new_templates=template_map,
@@ -14,8 +14,8 @@ from anki.consts import MODEL_STD
 from anki.models import NotetypeDict, NotetypeId, TemplateDict
 from anki.utils import joinFields

-DuplicateOrEmptyResult = _pb.NoteFieldsCheckOut.State
+DuplicateOrEmptyResult = _pb.NoteFieldsCheckResponse.State
-NoteFieldsCheckResult = _pb.NoteFieldsCheckOut.State
+NoteFieldsCheckResult = _pb.NoteFieldsCheckResponse.State

 # types
 NoteId = NewType("NoteId", int)
@@ -8,7 +8,7 @@ import anki._backend.backend_pb2 as _pb
 from anki.collection import OpChanges, OpChangesWithCount, OpChangesWithId
 from anki.config import Config

-SchedTimingToday = _pb.SchedTimingTodayOut
+SchedTimingToday = _pb.SchedTimingTodayResponse


 from typing import List, Optional, Sequence
@@ -19,9 +19,9 @@ from anki.decks import DeckConfigDict, DeckId, DeckTreeNode
 from anki.notes import NoteId
 from anki.utils import ids2str, intTime

-CongratsInfo = _pb.CongratsInfoOut
+CongratsInfo = _pb.CongratsInfoResponse
-UnburyDeck = _pb.UnburyDeckIn
+UnburyDeck = _pb.UnburyDeckRequest
-BuryOrSuspend = _pb.BuryOrSuspendCardsIn
+BuryOrSuspend = _pb.BuryOrSuspendCardsRequest
 FilteredDeckForUpdate = _pb.FilteredDeckForUpdate


@@ -18,8 +18,8 @@ from anki.lang import FormatTimeSpan
 from anki.scheduler.legacy import SchedulerBaseWithLegacy
 from anki.utils import ids2str, intTime

-CountsForDeckToday = _pb.CountsForDeckTodayOut
+CountsForDeckToday = _pb.CountsForDeckTodayResponse
-SchedTimingToday = _pb.SchedTimingTodayOut
+SchedTimingToday = _pb.SchedTimingTodayResponse

 # legacy type alias
 QueueConfig = Dict[str, Any]
@@ -5,8 +5,8 @@ import anki._backend.backend_pb2 as _pb

 # public exports
 SyncAuth = _pb.SyncAuth
-SyncOutput = _pb.SyncCollectionOut
+SyncOutput = _pb.SyncCollectionResponse
-SyncStatus = _pb.SyncStatusOut
+SyncStatus = _pb.SyncStatusResponse


 # Legacy attributes some add-ons may be using
@@ -27,9 +27,9 @@ except ImportError as e:
 from flask import Response

 from anki import Collection
-from anki._backend.backend_pb2 import SyncServerMethodIn
+from anki._backend.backend_pb2 import SyncServerMethodRequest

-Method = SyncServerMethodIn.Method  # pylint: disable=no-member
+Method = SyncServerMethodRequest.Method  # pylint: disable=no-member

 app = flask.Flask(__name__)
 col: Collection
@@ -116,7 +116,7 @@ def after_full_sync() -> None:

 def get_method(
     method_str: str,
-) -> Optional[SyncServerMethodIn.Method.V]:  # pylint: disable=no-member
+) -> Optional[SyncServerMethodRequest.Method.V]:  # pylint: disable=no-member
     s = method_str
     if s == "hostKey":
         return Method.HOST_KEY
@@ -65,7 +65,7 @@ class PartiallyRenderedCard:
     latex_svg: bool

     @classmethod
-    def from_proto(cls, out: _pb.RenderCardOut) -> PartiallyRenderedCard:
+    def from_proto(cls, out: _pb.RenderCardResponse) -> PartiallyRenderedCard:
         qnodes = cls.nodes_from_proto(out.question_nodes)
         anodes = cls.nodes_from_proto(out.answer_nodes)

@@ -11,10 +11,10 @@ ignored-classes=
   SearchNode,
   Config,
   OpChanges,
-  UnburyDeckIn,
+  UnburyDeckRequest,
   CardAnswer,
   QueuedCards,
-  ChangeNotetypeIn,
+  ChangeNotetypeRequest,

 [REPORTS]
 output-format=colorized
@@ -8,7 +8,7 @@ from typing import Sequence
 import aqt
 import aqt.deckconf
 from anki.collection import OpChanges
-from anki.models import ChangeNotetypeIn, NotetypeId
+from anki.models import ChangeNotetypeRequest, NotetypeId
 from anki.notes import NoteId
 from aqt.operations.notetype import change_notetype_of_notes
 from aqt.qt import *
@@ -70,7 +70,7 @@ class ChangeNotetypeDialog(QDialog):
         QDialog.reject(self)

     def save(self, data: bytes) -> None:
-        input = ChangeNotetypeIn()
+        input = ChangeNotetypeRequest()
         input.ParseFromString(data)

         if not self.mw.confirm_schema_modification():
@@ -11,7 +11,7 @@ from typing import Iterable, List, Optional, Sequence, TypeVar
 import aqt
 from anki.collection import SearchNode
 from anki.errors import Interrupted
-from anki.media import CheckMediaOut
+from anki.media import CheckMediaResponse
 from aqt.qt import *
 from aqt.utils import (
     askUser,
@@ -80,7 +80,7 @@ class MediaChecker:

         self.mw.taskman.run_on_main(lambda: self.mw.progress.update(label=label))

-    def _check(self) -> CheckMediaOut:
+    def _check(self) -> CheckMediaResponse:
         "Run the check on a background thread."
         return self.mw.col.media.check()

@@ -93,7 +93,7 @@ class MediaChecker:
         if isinstance(exc, Interrupted):
             return

-        output: CheckMediaOut = future.result()
+        output: CheckMediaResponse = future.result()
         report = output.report

         # show report and offer to delete
@@ -4,7 +4,7 @@
 from __future__ import annotations

 from anki.collection import OpChanges, OpChangesWithId
-from anki.models import ChangeNotetypeIn, NotetypeDict, NotetypeId
+from anki.models import ChangeNotetypeRequest, NotetypeDict, NotetypeId
 from aqt import QWidget
 from aqt.operations import CollectionOp

@@ -34,6 +34,6 @@ def remove_notetype(


 def change_notetype_of_notes(
-    *, parent: QWidget, input: ChangeNotetypeIn
+    *, parent: QWidget, input: ChangeNotetypeRequest
 ) -> CollectionOp[OpChanges]:
     return CollectionOp(parent, lambda col: col.models.change_notetype_of_notes(input))
@@ -115,47 +115,48 @@ enum ServiceIndex {
 }

 service SchedulingService {
-  rpc SchedTimingToday(Empty) returns (SchedTimingTodayOut);
+  rpc SchedTimingToday(Empty) returns (SchedTimingTodayResponse);
   rpc StudiedToday(Empty) returns (String);
-  rpc StudiedTodayMessage(StudiedTodayMessageIn) returns (String);
+  rpc StudiedTodayMessage(StudiedTodayMessageRequest) returns (String);
-  rpc UpdateStats(UpdateStatsIn) returns (Empty);
+  rpc UpdateStats(UpdateStatsRequest) returns (Empty);
-  rpc ExtendLimits(ExtendLimitsIn) returns (Empty);
+  rpc ExtendLimits(ExtendLimitsRequest) returns (Empty);
-  rpc CountsForDeckToday(DeckId) returns (CountsForDeckTodayOut);
+  rpc CountsForDeckToday(DeckId) returns (CountsForDeckTodayResponse);
-  rpc CongratsInfo(Empty) returns (CongratsInfoOut);
+  rpc CongratsInfo(Empty) returns (CongratsInfoResponse);
   rpc RestoreBuriedAndSuspendedCards(CardIds) returns (OpChanges);
-  rpc UnburyDeck(UnburyDeckIn) returns (OpChanges);
+  rpc UnburyDeck(UnburyDeckRequest) returns (OpChanges);
-  rpc BuryOrSuspendCards(BuryOrSuspendCardsIn) returns (OpChangesWithCount);
+  rpc BuryOrSuspendCards(BuryOrSuspendCardsRequest)
+      returns (OpChangesWithCount);
   rpc EmptyFilteredDeck(DeckId) returns (OpChanges);
   rpc RebuildFilteredDeck(DeckId) returns (OpChangesWithCount);
-  rpc ScheduleCardsAsNew(ScheduleCardsAsNewIn) returns (OpChanges);
+  rpc ScheduleCardsAsNew(ScheduleCardsAsNewRequest) returns (OpChanges);
-  rpc SetDueDate(SetDueDateIn) returns (OpChanges);
+  rpc SetDueDate(SetDueDateRequest) returns (OpChanges);
-  rpc SortCards(SortCardsIn) returns (OpChangesWithCount);
+  rpc SortCards(SortCardsRequest) returns (OpChangesWithCount);
-  rpc SortDeck(SortDeckIn) returns (OpChangesWithCount);
+  rpc SortDeck(SortDeckRequest) returns (OpChangesWithCount);
   rpc GetNextCardStates(CardId) returns (NextCardStates);
   rpc DescribeNextStates(NextCardStates) returns (StringList);
   rpc StateIsLeech(SchedulingState) returns (Bool);
   rpc AnswerCard(CardAnswer) returns (OpChanges);
   rpc UpgradeScheduler(Empty) returns (Empty);
-  rpc GetQueuedCards(GetQueuedCardsIn) returns (QueuedCards);
+  rpc GetQueuedCards(GetQueuedCardsRequest) returns (QueuedCards);
 }

 service DecksService {
   rpc AddDeckLegacy(Json) returns (OpChangesWithId);
-  rpc AddOrUpdateDeckLegacy(AddOrUpdateDeckLegacyIn) returns (DeckId);
+  rpc AddOrUpdateDeckLegacy(AddOrUpdateDeckLegacyRequest) returns (DeckId);
-  rpc DeckTree(DeckTreeIn) returns (DeckTreeNode);
+  rpc DeckTree(DeckTreeRequest) returns (DeckTreeNode);
   rpc DeckTreeLegacy(Empty) returns (Json);
   rpc GetAllDecksLegacy(Empty) returns (Json);
   rpc GetDeckIdByName(String) returns (DeckId);
   rpc GetDeck(DeckId) returns (Deck);
   rpc UpdateDeck(Deck) returns (OpChanges);
   rpc UpdateDeckLegacy(Json) returns (OpChanges);
-  rpc SetDeckCollapsed(SetDeckCollapsedIn) returns (OpChanges);
+  rpc SetDeckCollapsed(SetDeckCollapsedRequest) returns (OpChanges);
   rpc GetDeckLegacy(DeckId) returns (Json);
-  rpc GetDeckNames(GetDeckNamesIn) returns (DeckNames);
+  rpc GetDeckNames(GetDeckNamesRequest) returns (DeckNames);
   rpc NewDeckLegacy(Bool) returns (Json);
   rpc RemoveDecks(DeckIds) returns (OpChangesWithCount);
-  rpc ReparentDecks(ReparentDecksIn) returns (OpChangesWithCount);
+  rpc ReparentDecks(ReparentDecksRequest) returns (OpChangesWithCount);
-  rpc RenameDeck(RenameDeckIn) returns (OpChanges);
+  rpc RenameDeck(RenameDeckRequest) returns (OpChanges);
   rpc GetOrCreateFilteredDeck(DeckId) returns (FilteredDeckForUpdate);
   rpc AddOrUpdateFilteredDeck(FilteredDeckForUpdate) returns (OpChangesWithId);
   rpc FilteredDeckOrderLabels(Empty) returns (StringList);
@@ -165,16 +166,17 @@ service DecksService {

 service NotesService {
   rpc NewNote(NotetypeId) returns (Note);
-  rpc AddNote(AddNoteIn) returns (AddNoteOut);
+  rpc AddNote(AddNoteRequest) returns (AddNoteResponse);
-  rpc DefaultsForAdding(DefaultsForAddingIn) returns (DeckAndNotetype);
+  rpc DefaultsForAdding(DefaultsForAddingRequest) returns (DeckAndNotetype);
   rpc DefaultDeckForNotetype(NotetypeId) returns (DeckId);
-  rpc UpdateNote(UpdateNoteIn) returns (OpChanges);
+  rpc UpdateNote(UpdateNoteRequest) returns (OpChanges);
   rpc GetNote(NoteId) returns (Note);
-  rpc RemoveNotes(RemoveNotesIn) returns (OpChangesWithCount);
+  rpc RemoveNotes(RemoveNotesRequest) returns (OpChangesWithCount);
-  rpc ClozeNumbersInNote(Note) returns (ClozeNumbersInNoteOut);
+  rpc ClozeNumbersInNote(Note) returns (ClozeNumbersInNoteResponse);
-  rpc AfterNoteUpdates(AfterNoteUpdatesIn) returns (OpChangesWithCount);
+  rpc AfterNoteUpdates(AfterNoteUpdatesRequest) returns (OpChangesWithCount);
-  rpc FieldNamesForNotes(FieldNamesForNotesIn) returns (FieldNamesForNotesOut);
-  rpc NoteFieldsCheck(Note) returns (NoteFieldsCheckOut);
+  rpc FieldNamesForNotes(FieldNamesForNotesRequest)
+      returns (FieldNamesForNotesResponse);
+  rpc NoteFieldsCheck(Note) returns (NoteFieldsCheckResponse);
   rpc CardsOfNote(NoteId) returns (CardIds);
 }

@@ -183,24 +185,24 @@ service SyncService {
   rpc AbortSync(Empty) returns (Empty);
   rpc AbortMediaSync(Empty) returns (Empty);
   rpc BeforeUpload(Empty) returns (Empty);
-  rpc SyncLogin(SyncLoginIn) returns (SyncAuth);
+  rpc SyncLogin(SyncLoginRequest) returns (SyncAuth);
-  rpc SyncStatus(SyncAuth) returns (SyncStatusOut);
+  rpc SyncStatus(SyncAuth) returns (SyncStatusResponse);
-  rpc SyncCollection(SyncAuth) returns (SyncCollectionOut);
+  rpc SyncCollection(SyncAuth) returns (SyncCollectionResponse);
   rpc FullUpload(SyncAuth) returns (Empty);
   rpc FullDownload(SyncAuth) returns (Empty);
-  rpc SyncServerMethod(SyncServerMethodIn) returns (Json);
+  rpc SyncServerMethod(SyncServerMethodRequest) returns (Json);
 }

 service ConfigService {
   rpc GetConfigJson(String) returns (Json);
-  rpc SetConfigJson(SetConfigJsonIn) returns (OpChanges);
+  rpc SetConfigJson(SetConfigJsonRequest) returns (OpChanges);
-  rpc SetConfigJsonNoUndo(SetConfigJsonIn) returns (Empty);
+  rpc SetConfigJsonNoUndo(SetConfigJsonRequest) returns (Empty);
   rpc RemoveConfig(String) returns (OpChanges);
   rpc GetAllConfig(Empty) returns (Json);
   rpc GetConfigBool(Config.Bool) returns (Bool);
-  rpc SetConfigBool(SetConfigBoolIn) returns (OpChanges);
+  rpc SetConfigBool(SetConfigBoolRequest) returns (OpChanges);
   rpc GetConfigString(Config.String) returns (String);
-  rpc SetConfigString(SetConfigStringIn) returns (OpChanges);
+  rpc SetConfigString(SetConfigStringRequest) returns (OpChanges);
   rpc GetPreferences(Empty) returns (Preferences);
   rpc SetPreferences(Preferences) returns (OpChanges);
 }
@@ -210,7 +212,7 @@ service NotetypesService {
   rpc UpdateNotetype(Notetype) returns (OpChanges);
   rpc AddNotetypeLegacy(Json) returns (OpChangesWithId);
   rpc UpdateNotetypeLegacy(Json) returns (OpChanges);
-  rpc AddOrUpdateNotetype(AddOrUpdateNotetypeIn) returns (NotetypeId);
+  rpc AddOrUpdateNotetype(AddOrUpdateNotetypeRequest) returns (NotetypeId);
   rpc GetStockNotetypeLegacy(StockNotetype) returns (Json);
   rpc GetNotetype(NotetypeId) returns (Notetype);
   rpc GetNotetypeLegacy(NotetypeId) returns (Json);
@@ -218,24 +220,26 @@ service NotetypesService {
   rpc GetNotetypeNamesAndCounts(Empty) returns (NotetypeUseCounts);
   rpc GetNotetypeIdByName(String) returns (NotetypeId);
   rpc RemoveNotetype(NotetypeId) returns (OpChanges);
-  rpc GetAuxNotetypeConfigKey(GetAuxConfigKeyIn) returns (String);
+  rpc GetAuxNotetypeConfigKey(GetAuxConfigKeyRequest) returns (String);
-  rpc GetAuxTemplateConfigKey(GetAuxTemplateConfigKeyIn) returns (String);
+  rpc GetAuxTemplateConfigKey(GetAuxTemplateConfigKeyRequest) returns (String);
   rpc GetSingleNotetypeOfNotes(NoteIds) returns (NotetypeId);
-  rpc GetChangeNotetypeInfo(GetChangeNotetypeInfoIn)
+  rpc GetChangeNotetypeInfo(GetChangeNotetypeInfoRequest)
       returns (ChangeNotetypeInfo);
-  rpc ChangeNotetype(ChangeNotetypeIn) returns (OpChanges);
+  rpc ChangeNotetype(ChangeNotetypeRequest) returns (OpChanges);
 }

 service CardRenderingService {
-  rpc ExtractAVTags(ExtractAVTagsIn) returns (ExtractAVTagsOut);
+  rpc ExtractAVTags(ExtractAVTagsRequest) returns (ExtractAVTagsResponse);
-  rpc ExtractLatex(ExtractLatexIn) returns (ExtractLatexOut);
+  rpc ExtractLatex(ExtractLatexRequest) returns (ExtractLatexResponse);
   rpc GetEmptyCards(Empty) returns (EmptyCardsReport);
-  rpc RenderExistingCard(RenderExistingCardIn) returns (RenderCardOut);
-  rpc RenderUncommittedCard(RenderUncommittedCardIn) returns (RenderCardOut);
-  rpc RenderUncommittedCardLegacy(RenderUncommittedCardLegacyIn)
-      returns (RenderCardOut);
+  rpc RenderExistingCard(RenderExistingCardRequest)
+      returns (RenderCardResponse);
+  rpc RenderUncommittedCard(RenderUncommittedCardRequest)
+      returns (RenderCardResponse);
+  rpc RenderUncommittedCardLegacy(RenderUncommittedCardLegacyRequest)
+      returns (RenderCardResponse);
   rpc StripAVTags(String) returns (String);
-  rpc RenderMarkdown(RenderMarkdownIn) returns (String);
+  rpc RenderMarkdown(RenderMarkdownRequest) returns (String);
 }

 service DeckConfigService {
@@ -246,29 +250,29 @@ service DeckConfigService {
   rpc NewDeckConfigLegacy(Empty) returns (Json);
   rpc RemoveDeckConfig(DeckConfigId) returns (Empty);
   rpc GetDeckConfigsForUpdate(DeckId) returns (DeckConfigsForUpdate);
-  rpc UpdateDeckConfigs(UpdateDeckConfigsIn) returns (OpChanges);
+  rpc UpdateDeckConfigs(UpdateDeckConfigsRequest) returns (OpChanges);
 }

 service TagsService {
   rpc ClearUnusedTags(Empty) returns (OpChangesWithCount);
   rpc AllTags(Empty) returns (StringList);
   rpc RemoveTags(String) returns (OpChangesWithCount);
-  rpc SetTagCollapsed(SetTagCollapsedIn) returns (OpChanges);
+  rpc SetTagCollapsed(SetTagCollapsedRequest) returns (OpChanges);
   rpc TagTree(Empty) returns (TagTreeNode);
-  rpc ReparentTags(ReparentTagsIn) returns (OpChangesWithCount);
+  rpc ReparentTags(ReparentTagsRequest) returns (OpChangesWithCount);
-  rpc RenameTags(RenameTagsIn) returns (OpChangesWithCount);
+  rpc RenameTags(RenameTagsRequest) returns (OpChangesWithCount);
-  rpc AddNoteTags(NoteIdsAndTagsIn) returns (OpChangesWithCount);
+  rpc AddNoteTags(NoteIdsAndTagsRequest) returns (OpChangesWithCount);
-  rpc RemoveNoteTags(NoteIdsAndTagsIn) returns (OpChangesWithCount);
+  rpc RemoveNoteTags(NoteIdsAndTagsRequest) returns (OpChangesWithCount);
-  rpc FindAndReplaceTag(FindAndReplaceTagIn) returns (OpChangesWithCount);
+  rpc FindAndReplaceTag(FindAndReplaceTagRequest) returns (OpChangesWithCount);
 }

 service SearchService {
   rpc BuildSearchString(SearchNode) returns (String);
-  rpc SearchCards(SearchIn) returns (SearchOut);
+  rpc SearchCards(SearchRequest) returns (SearchResponse);
-  rpc SearchNotes(SearchIn) returns (SearchOut);
+  rpc SearchNotes(SearchRequest) returns (SearchResponse);
-  rpc JoinSearchNodes(JoinSearchNodesIn) returns (String);
+  rpc JoinSearchNodes(JoinSearchNodesRequest) returns (String);
-  rpc ReplaceSearchNode(ReplaceSearchNodeIn) returns (String);
+  rpc ReplaceSearchNode(ReplaceSearchNodeRequest) returns (String);
-  rpc FindAndReplace(FindAndReplaceIn) returns (OpChangesWithCount);
+  rpc FindAndReplace(FindAndReplaceRequest) returns (OpChangesWithCount);
   rpc AllBrowserColumns(Empty) returns (BrowserColumns);
   rpc BrowserRowForId(Int64) returns (BrowserRow);
   rpc SetActiveBrowserColumns(StringList) returns (Empty);
@@ -276,29 +280,29 @@ service SearchService {

 service StatsService {
   rpc CardStats(CardId) returns (String);
-  rpc Graphs(GraphsIn) returns (GraphsOut);
+  rpc Graphs(GraphsRequest) returns (GraphsResponse);
   rpc GetGraphPreferences(Empty) returns (GraphPreferences);
   rpc SetGraphPreferences(GraphPreferences) returns (Empty);
 }

 service MediaService {
-  rpc CheckMedia(Empty) returns (CheckMediaOut);
+  rpc CheckMedia(Empty) returns (CheckMediaResponse);
-  rpc TrashMediaFiles(TrashMediaFilesIn) returns (Empty);
+  rpc TrashMediaFiles(TrashMediaFilesRequest) returns (Empty);
-  rpc AddMediaFile(AddMediaFileIn) returns (String);
+  rpc AddMediaFile(AddMediaFileRequest) returns (String);
   rpc EmptyTrash(Empty) returns (Empty);
   rpc RestoreTrash(Empty) returns (Empty);
 }

 service I18nService {
-  rpc TranslateString(TranslateStringIn) returns (String);
+  rpc TranslateString(TranslateStringRequest) returns (String);
-  rpc FormatTimespan(FormatTimespanIn) returns (String);
+  rpc FormatTimespan(FormatTimespanRequest) returns (String);
-  rpc I18nResources(I18nResourcesIn) returns (Json);
+  rpc I18nResources(I18nResourcesRequest) returns (Json);
 }

 service CollectionService {
-  rpc OpenCollection(OpenCollectionIn) returns (Empty);
+  rpc OpenCollection(OpenCollectionRequest) returns (Empty);
-  rpc CloseCollection(CloseCollectionIn) returns (Empty);
+  rpc CloseCollection(CloseCollectionRequest) returns (Empty);
-  rpc CheckDatabase(Empty) returns (CheckDatabaseOut);
+  rpc CheckDatabase(Empty) returns (CheckDatabaseResponse);
   rpc GetUndoStatus(Empty) returns (UndoStatus);
   rpc Undo(Empty) returns (OpChangesAfterUndo);
   rpc Redo(Empty) returns (OpChangesAfterUndo);
@@ -310,10 +314,10 @@ service CollectionService {

 service CardsService {
   rpc GetCard(CardId) returns (Card);
-  rpc UpdateCard(UpdateCardIn) returns (OpChanges);
+  rpc UpdateCard(UpdateCardRequest) returns (OpChanges);
-  rpc RemoveCards(RemoveCardsIn) returns (Empty);
+  rpc RemoveCards(RemoveCardsRequest) returns (Empty);
-  rpc SetDeck(SetDeckIn) returns (OpChangesWithCount);
+  rpc SetDeck(SetDeckRequest) returns (OpChangesWithCount);
-  rpc SetFlag(SetFlagIn) returns (OpChangesWithCount);
+  rpc SetFlag(SetFlagRequest) returns (OpChangesWithCount);
 }

 // Protobuf stored in .anki2 files
@@ -669,12 +673,12 @@ message Progress {
 // Messages
 ///////////////////////////////////////////////////////////

-message SchedTimingTodayOut {
+message SchedTimingTodayResponse {
   uint32 days_elapsed = 1;
   int64 next_day_at = 2;
 }

-message DeckTreeIn {
+message DeckTreeRequest {
   // if non-zero, counts for the provided timestamp will be included
   int64 now = 1;
   int64 top_deck_id = 2;
@@ -697,26 +701,26 @@ message DeckTreeNode {
   repeated DeckTreeNode children = 3;
 }

-message RenderExistingCardIn {
+message RenderExistingCardRequest {
   int64 card_id = 1;
   bool browser = 2;
 }

-message RenderUncommittedCardIn {
+message RenderUncommittedCardRequest {
   Note note = 1;
   uint32 card_ord = 2;
   Notetype.Template template = 3;
   bool fill_empty = 4;
 }

-message RenderUncommittedCardLegacyIn {
+message RenderUncommittedCardLegacyRequest {
   Note note = 1;
   uint32 card_ord = 2;
   bytes template = 3;
   bool fill_empty = 4;
 }

-message RenderCardOut {
+message RenderCardResponse {
   repeated RenderedTemplateNode question_nodes = 1;
   repeated RenderedTemplateNode answer_nodes = 2;
   string css = 3;
@@ -736,12 +740,12 @@ message RenderedTemplateReplacement {
   repeated string filters = 3;
 }

-message ExtractAVTagsIn {
+message ExtractAVTagsRequest {
   string text = 1;
   bool question_side = 2;
 }

-message ExtractAVTagsOut {
+message ExtractAVTagsResponse {
   string text = 1;
   repeated AVTag av_tags = 2;
 }
@@ -761,13 +765,13 @@ message TTSTag {
   repeated string other_args = 5;
 }

-message ExtractLatexIn {
+message ExtractLatexRequest {
   string text = 1;
   bool svg = 2;
   bool expand_clozes = 3;
 }

-message ExtractLatexOut {
+message ExtractLatexResponse {
   string text = 1;
   repeated ExtractedLatex latex = 2;
 }
@@ -777,23 +781,23 @@ message ExtractedLatex {
   string latex_body = 2;
 }

-message AddMediaFileIn {
+message AddMediaFileRequest {
   string desired_name = 1;
   bytes data = 2;
 }

-message CheckMediaOut {
+message CheckMediaResponse {
   repeated string unused = 1;
   repeated string missing = 2;
   string report = 3;
   bool have_trash = 4;
 }

-message TrashMediaFilesIn {
+message TrashMediaFilesRequest {
   repeated string fnames = 1;
 }

-message TranslateStringIn {
+message TranslateStringRequest {
   uint32 module_index = 1;
   uint32 message_index = 2;
   map<string, TranslateArgValue> args = 3;
@@ -806,7 +810,7 @@ message TranslateArgValue {
   }
 }

-message FormatTimespanIn {
+message FormatTimespanRequest {
   enum Context {
     PRECISE = 0;
     ANSWER_BUTTONS = 1;
@@ -817,33 +821,33 @@ message FormatTimespanIn {
   Context context = 2;
 }

-message I18nResourcesIn {
+message I18nResourcesRequest {
   repeated string modules = 1;
 }

-message StudiedTodayMessageIn {
+message StudiedTodayMessageRequest {
   uint32 cards = 1;
   double seconds = 2;
 }

-message CongratsLearnMessageIn {
+message CongratsLearnMessageRequest {
   float next_due = 1;
   uint32 remaining = 2;
 }

-message OpenCollectionIn {
+message OpenCollectionRequest {
   string collection_path = 1;
   string media_folder_path = 2;
   string media_db_path = 3;
   string log_path = 4;
 }

-message SearchIn {
+message SearchRequest {
   string search = 1;
   SortOrder order = 2;
 }

-message SearchOut {
+message SearchResponse {
   repeated int64 ids = 1;
 }

@@ -929,18 +933,18 @@ message SearchNode {
   }
 }

-message JoinSearchNodesIn {
+message JoinSearchNodesRequest {
   SearchNode.Group.Joiner joiner = 1;
   SearchNode existing_node = 2;
   SearchNode additional_node = 3;
 }

-message ReplaceSearchNodeIn {
+message ReplaceSearchNodeRequest {
   SearchNode existing_node = 1;
   SearchNode replacement_node = 2;
 }

-message CloseCollectionIn {
+message CloseCollectionRequest {
   bool downgrade_to_schema11 = 1;
 }

|
@ -963,7 +967,7 @@ message DeckConfigsForUpdate {
|
||||||
bool have_addons = 6;
|
bool have_addons = 6;
|
||||||
}
|
}
|
||||||
|
|
||||||
message UpdateDeckConfigsIn {
|
message UpdateDeckConfigsRequest {
|
||||||
int64 target_deck_id = 1;
|
int64 target_deck_id = 1;
|
||||||
/// Unchanged, non-selected configs can be omitted. Deck will
|
/// Unchanged, non-selected configs can be omitted. Deck will
|
||||||
/// be set to whichever entry comes last.
|
/// be set to whichever entry comes last.
|
||||||
|
@ -972,12 +976,12 @@ message UpdateDeckConfigsIn {
|
||||||
bool apply_to_children = 4;
|
bool apply_to_children = 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetTagCollapsedIn {
|
message SetTagCollapsedRequest {
|
||||||
string name = 1;
|
string name = 1;
|
||||||
bool collapsed = 2;
|
bool collapsed = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetDeckCollapsedIn {
|
message SetDeckCollapsedRequest {
|
||||||
enum Scope {
|
enum Scope {
|
||||||
REVIEWER = 0;
|
REVIEWER = 0;
|
||||||
BROWSER = 1;
|
BROWSER = 1;
|
||||||
|
@ -988,7 +992,7 @@ message SetDeckCollapsedIn {
|
||||||
Scope scope = 3;
|
Scope scope = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GetChangedTagsOut {
|
message GetChangedTagsResponse {
|
||||||
repeated string tags = 1;
|
repeated string tags = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -999,17 +1003,17 @@ message TagTreeNode {
|
||||||
bool collapsed = 4;
|
bool collapsed = 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ReparentTagsIn {
|
message ReparentTagsRequest {
|
||||||
repeated string tags = 1;
|
repeated string tags = 1;
|
||||||
string new_parent = 2;
|
string new_parent = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message RenameTagsIn {
|
message RenameTagsRequest {
|
||||||
string current_prefix = 1;
|
string current_prefix = 1;
|
||||||
string new_prefix = 2;
|
string new_prefix = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetConfigJsonIn {
|
message SetConfigJsonRequest {
|
||||||
string key = 1;
|
string key = 1;
|
||||||
bytes value_json = 2;
|
bytes value_json = 2;
|
||||||
bool undoable = 3;
|
bool undoable = 3;
|
||||||
|
@ -1046,27 +1050,27 @@ message NotetypeNameIdUseCount {
|
||||||
uint32 use_count = 3;
|
uint32 use_count = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message AddOrUpdateNotetypeIn {
|
message AddOrUpdateNotetypeRequest {
|
||||||
bytes json = 1;
|
bytes json = 1;
|
||||||
bool preserve_usn_and_mtime = 2;
|
bool preserve_usn_and_mtime = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message AddNoteIn {
|
message AddNoteRequest {
|
||||||
Note note = 1;
|
Note note = 1;
|
||||||
int64 deck_id = 2;
|
int64 deck_id = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message AddNoteOut {
|
message AddNoteResponse {
|
||||||
int64 note_id = 1;
|
int64 note_id = 1;
|
||||||
OpChanges changes = 2;
|
OpChanges changes = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message UpdateNoteIn {
|
message UpdateNoteRequest {
|
||||||
Note note = 1;
|
Note note = 1;
|
||||||
bool skip_undo_entry = 2;
|
bool skip_undo_entry = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message UpdateCardIn {
|
message UpdateCardRequest {
|
||||||
Card card = 1;
|
Card card = 1;
|
||||||
bool skip_undo_entry = 2;
|
bool skip_undo_entry = 2;
|
||||||
}
|
}
|
||||||
|
@ -1090,20 +1094,20 @@ message DeckNameId {
|
||||||
string name = 2;
|
string name = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message AddOrUpdateDeckLegacyIn {
|
message AddOrUpdateDeckLegacyRequest {
|
||||||
bytes deck = 1;
|
bytes deck = 1;
|
||||||
bool preserve_usn_and_mtime = 2;
|
bool preserve_usn_and_mtime = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message FieldNamesForNotesIn {
|
message FieldNamesForNotesRequest {
|
||||||
repeated int64 nids = 1;
|
repeated int64 nids = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message FieldNamesForNotesOut {
|
message FieldNamesForNotesResponse {
|
||||||
repeated string fields = 1;
|
repeated string fields = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message FindAndReplaceIn {
|
message FindAndReplaceRequest {
|
||||||
repeated int64 nids = 1;
|
repeated int64 nids = 1;
|
||||||
string search = 2;
|
string search = 2;
|
||||||
string replacement = 3;
|
string replacement = 3;
|
||||||
|
@ -1156,18 +1160,18 @@ message BrowserRow {
|
||||||
uint32 font_size = 4;
|
uint32 font_size = 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
message AfterNoteUpdatesIn {
|
message AfterNoteUpdatesRequest {
|
||||||
repeated int64 nids = 1;
|
repeated int64 nids = 1;
|
||||||
bool mark_notes_modified = 2;
|
bool mark_notes_modified = 2;
|
||||||
bool generate_cards = 3;
|
bool generate_cards = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message NoteIdsAndTagsIn {
|
message NoteIdsAndTagsRequest {
|
||||||
repeated int64 note_ids = 1;
|
repeated int64 note_ids = 1;
|
||||||
string tags = 2;
|
string tags = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message FindAndReplaceTagIn {
|
message FindAndReplaceTagRequest {
|
||||||
repeated int64 note_ids = 1;
|
repeated int64 note_ids = 1;
|
||||||
string search = 2;
|
string search = 2;
|
||||||
string replacement = 3;
|
string replacement = 3;
|
||||||
|
@ -1175,7 +1179,7 @@ message FindAndReplaceTagIn {
|
||||||
bool match_case = 5;
|
bool match_case = 5;
|
||||||
}
|
}
|
||||||
|
|
||||||
message CheckDatabaseOut {
|
message CheckDatabaseResponse {
|
||||||
repeated string problems = 1;
|
repeated string problems = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1216,22 +1220,22 @@ message Preferences {
|
||||||
Editing editing = 3;
|
Editing editing = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ClozeNumbersInNoteOut {
|
message ClozeNumbersInNoteResponse {
|
||||||
repeated uint32 numbers = 1;
|
repeated uint32 numbers = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GetDeckNamesIn {
|
message GetDeckNamesRequest {
|
||||||
bool skip_empty_default = 1;
|
bool skip_empty_default = 1;
|
||||||
// if unset, implies skip_empty_default
|
// if unset, implies skip_empty_default
|
||||||
bool include_filtered = 2;
|
bool include_filtered = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ReparentDecksIn {
|
message ReparentDecksRequest {
|
||||||
repeated int64 deck_ids = 1;
|
repeated int64 deck_ids = 1;
|
||||||
int64 new_parent = 2;
|
int64 new_parent = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message NoteFieldsCheckOut {
|
message NoteFieldsCheckResponse {
|
||||||
enum State {
|
enum State {
|
||||||
NORMAL = 0;
|
NORMAL = 0;
|
||||||
EMPTY = 1;
|
EMPTY = 1;
|
||||||
|
@ -1243,12 +1247,12 @@ message NoteFieldsCheckOut {
|
||||||
State state = 1;
|
State state = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SyncLoginIn {
|
message SyncLoginRequest {
|
||||||
string username = 1;
|
string username = 1;
|
||||||
string password = 2;
|
string password = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SyncStatusOut {
|
message SyncStatusResponse {
|
||||||
enum Required {
|
enum Required {
|
||||||
NO_CHANGES = 0;
|
NO_CHANGES = 0;
|
||||||
NORMAL_SYNC = 1;
|
NORMAL_SYNC = 1;
|
||||||
|
@ -1257,7 +1261,7 @@ message SyncStatusOut {
|
||||||
Required required = 1;
|
Required required = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SyncCollectionOut {
|
message SyncCollectionResponse {
|
||||||
enum ChangesRequired {
|
enum ChangesRequired {
|
||||||
NO_CHANGES = 0;
|
NO_CHANGES = 0;
|
||||||
NORMAL_SYNC = 1;
|
NORMAL_SYNC = 1;
|
||||||
|
@ -1278,7 +1282,7 @@ message SyncAuth {
|
||||||
uint32 host_number = 2;
|
uint32 host_number = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SyncServerMethodIn {
|
message SyncServerMethodRequest {
|
||||||
enum Method {
|
enum Method {
|
||||||
HOST_KEY = 0;
|
HOST_KEY = 0;
|
||||||
META = 1;
|
META = 1;
|
||||||
|
@ -1298,39 +1302,39 @@ message SyncServerMethodIn {
|
||||||
bytes data = 2;
|
bytes data = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message RemoveNotesIn {
|
message RemoveNotesRequest {
|
||||||
repeated int64 note_ids = 1;
|
repeated int64 note_ids = 1;
|
||||||
repeated int64 card_ids = 2;
|
repeated int64 card_ids = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message RemoveCardsIn {
|
message RemoveCardsRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
message UpdateStatsIn {
|
message UpdateStatsRequest {
|
||||||
int64 deck_id = 1;
|
int64 deck_id = 1;
|
||||||
int32 new_delta = 2;
|
int32 new_delta = 2;
|
||||||
int32 review_delta = 4;
|
int32 review_delta = 4;
|
||||||
int32 millisecond_delta = 5;
|
int32 millisecond_delta = 5;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ExtendLimitsIn {
|
message ExtendLimitsRequest {
|
||||||
int64 deck_id = 1;
|
int64 deck_id = 1;
|
||||||
int32 new_delta = 2;
|
int32 new_delta = 2;
|
||||||
int32 review_delta = 3;
|
int32 review_delta = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message CountsForDeckTodayOut {
|
message CountsForDeckTodayResponse {
|
||||||
int32 new = 1;
|
int32 new = 1;
|
||||||
int32 review = 2;
|
int32 review = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GraphsIn {
|
message GraphsRequest {
|
||||||
string search = 1;
|
string search = 1;
|
||||||
uint32 days = 2;
|
uint32 days = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GraphsOut {
|
message GraphsResponse {
|
||||||
repeated Card cards = 1;
|
repeated Card cards = 1;
|
||||||
repeated RevlogEntry revlog = 2;
|
repeated RevlogEntry revlog = 2;
|
||||||
uint32 days_elapsed = 3;
|
uint32 days_elapsed = 3;
|
||||||
|
@ -1373,7 +1377,7 @@ message RevlogEntry {
|
||||||
ReviewKind review_kind = 9;
|
ReviewKind review_kind = 9;
|
||||||
}
|
}
|
||||||
|
|
||||||
message CongratsInfoOut {
|
message CongratsInfoResponse {
|
||||||
uint32 learn_remaining = 1;
|
uint32 learn_remaining = 1;
|
||||||
uint32 secs_until_next_learn = 2;
|
uint32 secs_until_next_learn = 2;
|
||||||
bool review_remaining = 3;
|
bool review_remaining = 3;
|
||||||
|
@ -1385,7 +1389,7 @@ message CongratsInfoOut {
|
||||||
string deck_description = 9;
|
string deck_description = 9;
|
||||||
}
|
}
|
||||||
|
|
||||||
message UnburyDeckIn {
|
message UnburyDeckRequest {
|
||||||
enum Mode {
|
enum Mode {
|
||||||
ALL = 0;
|
ALL = 0;
|
||||||
SCHED_ONLY = 1;
|
SCHED_ONLY = 1;
|
||||||
|
@ -1395,7 +1399,7 @@ message UnburyDeckIn {
|
||||||
Mode mode = 2;
|
Mode mode = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message BuryOrSuspendCardsIn {
|
message BuryOrSuspendCardsRequest {
|
||||||
enum Mode {
|
enum Mode {
|
||||||
SUSPEND = 0;
|
SUSPEND = 0;
|
||||||
BURY_SCHED = 1;
|
BURY_SCHED = 1;
|
||||||
|
@ -1406,18 +1410,18 @@ message BuryOrSuspendCardsIn {
|
||||||
Mode mode = 3;
|
Mode mode = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ScheduleCardsAsNewIn {
|
message ScheduleCardsAsNewRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
bool log = 2;
|
bool log = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetDueDateIn {
|
message SetDueDateRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
string days = 2;
|
string days = 2;
|
||||||
Config.String config_key = 3;
|
Config.String config_key = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SortCardsIn {
|
message SortCardsRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
uint32 starting_from = 2;
|
uint32 starting_from = 2;
|
||||||
uint32 step_size = 3;
|
uint32 step_size = 3;
|
||||||
|
@ -1425,12 +1429,12 @@ message SortCardsIn {
|
||||||
bool shift_existing = 5;
|
bool shift_existing = 5;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SortDeckIn {
|
message SortDeckRequest {
|
||||||
int64 deck_id = 1;
|
int64 deck_id = 1;
|
||||||
bool randomize = 2;
|
bool randomize = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetDeckIn {
|
message SetDeckRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
int64 deck_id = 2;
|
int64 deck_id = 2;
|
||||||
}
|
}
|
||||||
|
@ -1469,19 +1473,19 @@ message Config {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetConfigBoolIn {
|
message SetConfigBoolRequest {
|
||||||
Config.Bool.Key key = 1;
|
Config.Bool.Key key = 1;
|
||||||
bool value = 2;
|
bool value = 2;
|
||||||
bool undoable = 3;
|
bool undoable = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetConfigStringIn {
|
message SetConfigStringRequest {
|
||||||
Config.String.Key key = 1;
|
Config.String.Key key = 1;
|
||||||
string value = 2;
|
string value = 2;
|
||||||
bool undoable = 3;
|
bool undoable = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message RenderMarkdownIn {
|
message RenderMarkdownRequest {
|
||||||
string markdown = 1;
|
string markdown = 1;
|
||||||
bool sanitize = 2;
|
bool sanitize = 2;
|
||||||
}
|
}
|
||||||
|
@@ -1557,7 +1561,7 @@ message CardAnswer {
   uint32 milliseconds_taken = 6;
 }

-message GetQueuedCardsIn {
+message GetQueuedCardsRequest {
   uint32 fetch_limit = 1;
   bool intraday_learning_only = 2;
 }
|
@ -1612,7 +1616,7 @@ message OpChangesAfterUndo {
|
||||||
uint32 counter = 5;
|
uint32 counter = 5;
|
||||||
}
|
}
|
||||||
|
|
||||||
message DefaultsForAddingIn {
|
message DefaultsForAddingRequest {
|
||||||
int64 home_deck_of_current_review_card = 1;
|
int64 home_deck_of_current_review_card = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1621,7 +1625,7 @@ message DeckAndNotetype {
|
||||||
int64 notetype_id = 2;
|
int64 notetype_id = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message RenameDeckIn {
|
message RenameDeckRequest {
|
||||||
int64 deck_id = 1;
|
int64 deck_id = 1;
|
||||||
string new_name = 2;
|
string new_name = 2;
|
||||||
}
|
}
|
||||||
|
@ -1632,28 +1636,28 @@ message FilteredDeckForUpdate {
|
||||||
Deck.Filtered config = 3;
|
Deck.Filtered config = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message SetFlagIn {
|
message SetFlagRequest {
|
||||||
repeated int64 card_ids = 1;
|
repeated int64 card_ids = 1;
|
||||||
uint32 flag = 2;
|
uint32 flag = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GetAuxConfigKeyIn {
|
message GetAuxConfigKeyRequest {
|
||||||
int64 id = 1;
|
int64 id = 1;
|
||||||
string key = 2;
|
string key = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GetAuxTemplateConfigKeyIn {
|
message GetAuxTemplateConfigKeyRequest {
|
||||||
int64 notetype_id = 1;
|
int64 notetype_id = 1;
|
||||||
uint32 card_ordinal = 2;
|
uint32 card_ordinal = 2;
|
||||||
string key = 3;
|
string key = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
message GetChangeNotetypeInfoIn {
|
message GetChangeNotetypeInfoRequest {
|
||||||
int64 old_notetype_id = 1;
|
int64 old_notetype_id = 1;
|
||||||
int64 new_notetype_id = 2;
|
int64 new_notetype_id = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
message ChangeNotetypeIn {
|
message ChangeNotetypeRequest {
|
||||||
repeated int64 note_ids = 1;
|
repeated int64 note_ids = 1;
|
||||||
// -1 is used to represent null, as nullable repeated fields
|
// -1 is used to represent null, as nullable repeated fields
|
||||||
// are unwieldy in protobuf
|
// are unwieldy in protobuf
|
||||||
|
@ -1669,5 +1673,5 @@ message ChangeNotetypeInfo {
|
||||||
repeated string old_template_names = 2;
|
repeated string old_template_names = 2;
|
||||||
repeated string new_field_names = 3;
|
repeated string new_field_names = 3;
|
||||||
repeated string new_template_names = 4;
|
repeated string new_template_names = 4;
|
||||||
ChangeNotetypeIn input = 5;
|
ChangeNotetypeRequest input = 5;
|
||||||
}
|
}
|
||||||
|
|
|
@@ -21,7 +21,7 @@ impl CardsService for Backend {
})
}

fn update_card(&self, input: pb::UpdateCardIn) -> Result<pb::OpChanges> {
fn update_card(&self, input: pb::UpdateCardRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
let mut card: Card = input.card.ok_or(AnkiError::NotFound)?.try_into()?;
col.update_card_maybe_undoable(&mut card, !input.skip_undo_entry)
@@ -29,7 +29,7 @@ impl CardsService for Backend {
.map(Into::into)
}

fn remove_cards(&self, input: pb::RemoveCardsIn) -> Result<pb::Empty> {
fn remove_cards(&self, input: pb::RemoveCardsRequest) -> Result<pb::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes(
@@ -44,13 +44,13 @@ impl CardsService for Backend {
})
}

fn set_deck(&self, input: pb::SetDeckIn) -> Result<pb::OpChangesWithCount> {
fn set_deck(&self, input: pb::SetDeckRequest) -> Result<pb::OpChangesWithCount> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect();
let deck_id = input.deck_id.into();
self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
}

fn set_flag(&self, input: pb::SetFlagIn) -> Result<pb::OpChangesWithCount> {
fn set_flag(&self, input: pb::SetFlagRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.set_card_flag(&to_card_ids(input.card_ids), input.flag)
.map(Into::into)

@@ -14,7 +14,10 @@ use crate::{
};

impl CardRenderingService for Backend {
fn extract_av_tags(&self, input: pb::ExtractAvTagsIn) -> Result<pb::ExtractAvTagsOut> {
fn extract_av_tags(
&self,
input: pb::ExtractAvTagsRequest,
) -> Result<pb::ExtractAvTagsResponse> {
let (text, tags) = extract_av_tags(&input.text, input.question_side);
let pt_tags = tags
.into_iter()
@@ -40,13 +43,13 @@ impl CardRenderingService for Backend {
})
.collect();

Ok(pb::ExtractAvTagsOut {
Ok(pb::ExtractAvTagsResponse {
text: text.into(),
av_tags: pt_tags,
})
}

fn extract_latex(&self, input: pb::ExtractLatexIn) -> Result<pb::ExtractLatexOut> {
fn extract_latex(&self, input: pb::ExtractLatexRequest) -> Result<pb::ExtractLatexResponse> {
let func = if input.expand_clozes {
extract_latex_expanding_clozes
} else {
@@ -54,7 +57,7 @@ impl CardRenderingService for Backend {
};
let (text, extracted) = func(&input.text, input.svg);

Ok(pb::ExtractLatexOut {
Ok(pb::ExtractLatexResponse {
text,
latex: extracted
.into_iter()
@@ -88,7 +91,10 @@ impl CardRenderingService for Backend {
})
}

fn render_existing_card(&self, input: pb::RenderExistingCardIn) -> Result<pb::RenderCardOut> {
fn render_existing_card(
&self,
input: pb::RenderExistingCardRequest,
) -> Result<pb::RenderCardResponse> {
self.with_col(|col| {
col.render_existing_card(CardId(input.card_id), input.browser)
.map(Into::into)
@@ -97,8 +103,8 @@ impl CardRenderingService for Backend {

fn render_uncommitted_card(
&self,
input: pb::RenderUncommittedCardIn,
input: pb::RenderUncommittedCardRequest,
) -> Result<pb::RenderCardOut> {
) -> Result<pb::RenderCardResponse> {
let template = input.template.ok_or(AnkiError::NotFound)?.into();
let mut note = input
.note
@@ -114,8 +120,8 @@ impl CardRenderingService for Backend {

fn render_uncommitted_card_legacy(
&self,
input: pb::RenderUncommittedCardLegacyIn,
input: pb::RenderUncommittedCardLegacyRequest,
) -> Result<pb::RenderCardOut> {
) -> Result<pb::RenderCardResponse> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
let template = schema11.into();
let mut note = input
@@ -136,7 +142,7 @@ impl CardRenderingService for Backend {
})
}

fn render_markdown(&self, input: pb::RenderMarkdownIn) -> Result<pb::String> {
fn render_markdown(&self, input: pb::RenderMarkdownRequest) -> Result<pb::String> {
let mut text = render_markdown(&input.markdown);
if input.sanitize {
// currently no images
@@ -170,9 +176,9 @@ fn rendered_node_to_proto(node: RenderedNode) -> pb::rendered_template_node::Val
}
}

impl From<RenderCardOutput> for pb::RenderCardOut {
impl From<RenderCardOutput> for pb::RenderCardResponse {
fn from(o: RenderCardOutput) -> Self {
pb::RenderCardOut {
pb::RenderCardResponse {
question_nodes: rendered_nodes_to_proto(o.qnodes),
answer_nodes: rendered_nodes_to_proto(o.anodes),
css: o.css,

@@ -24,7 +24,7 @@ impl CollectionService for Backend {
Ok(().into())
}

fn open_collection(&self, input: pb::OpenCollectionIn) -> Result<pb::Empty> {
fn open_collection(&self, input: pb::OpenCollectionRequest) -> Result<pb::Empty> {
let mut col = self.col.lock().unwrap();
if col.is_some() {
return Err(AnkiError::CollectionAlreadyOpen);
@@ -53,7 +53,7 @@ impl CollectionService for Backend {
Ok(().into())
}

fn close_collection(&self, input: pb::CloseCollectionIn) -> Result<pb::Empty> {
fn close_collection(&self, input: pb::CloseCollectionRequest) -> Result<pb::Empty> {
self.abort_media_sync_and_wait();

let mut col = self.col.lock().unwrap();
@@ -72,14 +72,14 @@ impl CollectionService for Backend {
Ok(().into())
}

fn check_database(&self, _input: pb::Empty) -> Result<pb::CheckDatabaseOut> {
fn check_database(&self, _input: pb::Empty) -> Result<pb::CheckDatabaseResponse> {
let mut handler = self.new_progress_handler();
let progress_fn = move |progress, throttle| {
handler.update(Progress::DatabaseCheck(progress), throttle);
};
self.with_col(|col| {
col.check_database(progress_fn)
.map(|problems| pb::CheckDatabaseOut {
.map(|problems| pb::CheckDatabaseResponse {
problems: problems.to_i18n_strings(&col.tr),
})
})

@@ -62,7 +62,7 @@ impl ConfigService for Backend {
})
}

fn set_config_json(&self, input: pb::SetConfigJsonIn) -> Result<pb::OpChanges> {
fn set_config_json(&self, input: pb::SetConfigJsonRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.set_config_json(input.key.as_str(), &val, input.undoable)
@@ -70,7 +70,7 @@ impl ConfigService for Backend {
.map(Into::into)
}

fn set_config_json_no_undo(&self, input: pb::SetConfigJsonIn) -> Result<pb::Empty> {
fn set_config_json_no_undo(&self, input: pb::SetConfigJsonRequest) -> Result<pb::Empty> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
@@ -99,7 +99,7 @@ impl ConfigService for Backend {
})
}

fn set_config_bool(&self, input: pb::SetConfigBoolIn) -> Result<pb::OpChanges> {
fn set_config_bool(&self, input: pb::SetConfigBoolRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable))
.map(Into::into)
}
@@ -112,7 +112,7 @@ impl ConfigService for Backend {
})
}

fn set_config_string(&self, input: pb::SetConfigStringIn) -> Result<pb::OpChanges> {
fn set_config_string(&self, input: pb::SetConfigStringRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable))
.map(Into::into)
}

@@ -5,7 +5,7 @@ use super::Backend;
pub(super) use crate::backend_proto::deckconfig_service::Service as DeckConfigService;
use crate::{
backend_proto as pb,
deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsIn},
deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsRequest},
prelude::*,
};

@@ -63,7 +63,7 @@ impl DeckConfigService for Backend {
self.with_col(|col| col.get_deck_configs_for_update(input.into()))
}

fn update_deck_configs(&self, input: pb::UpdateDeckConfigsIn) -> Result<pb::OpChanges> {
fn update_deck_configs(&self, input: pb::UpdateDeckConfigsRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.update_deck_configs(input.into()))
.map(Into::into)
}
@@ -81,9 +81,9 @@ impl From<DeckConfig> for pb::DeckConfig {
}
}

impl From<pb::UpdateDeckConfigsIn> for UpdateDeckConfigsIn {
impl From<pb::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
fn from(c: pb::UpdateDeckConfigsIn) -> Self {
fn from(c: pb::UpdateDeckConfigsRequest) -> Self {
UpdateDeckConfigsIn {
UpdateDeckConfigsRequest {
target_deck_id: c.target_deck_id.into(),
configs: c.configs.into_iter().map(Into::into).collect(),
removed_config_ids: c.removed_config_ids.into_iter().map(Into::into).collect(),

@@ -22,7 +22,10 @@ impl DecksService for Backend {
})
}

fn add_or_update_deck_legacy(&self, input: pb::AddOrUpdateDeckLegacyIn) -> Result<pb::DeckId> {
fn add_or_update_deck_legacy(
&self,
input: pb::AddOrUpdateDeckLegacyRequest,
) -> Result<pb::DeckId> {
self.with_col(|col| {
let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
let mut deck: Deck = schema11.into();
@@ -38,7 +41,7 @@ impl DecksService for Backend {
})
}

fn deck_tree(&self, input: pb::DeckTreeIn) -> Result<pb::DeckTreeNode> {
fn deck_tree(&self, input: pb::DeckTreeRequest) -> Result<pb::DeckTreeNode> {
let lim = if input.top_deck_id > 0 {
Some(DeckId(input.top_deck_id))
} else {
@@ -118,7 +121,7 @@ impl DecksService for Backend {
})
}

fn get_deck_names(&self, input: pb::GetDeckNamesIn) -> Result<pb::DeckNames> {
fn get_deck_names(&self, input: pb::GetDeckNamesRequest) -> Result<pb::DeckNames> {
self.with_col(|col| {
let names = if input.include_filtered {
col.get_all_deck_names(input.skip_empty_default)?
@@ -151,7 +154,7 @@ impl DecksService for Backend {
.map(Into::into)
}

fn reparent_decks(&self, input: pb::ReparentDecksIn) -> Result<pb::OpChangesWithCount> {
fn reparent_decks(&self, input: pb::ReparentDecksRequest) -> Result<pb::OpChangesWithCount> {
let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
let new_parent = if input.new_parent == 0 {
None
@@ -162,7 +165,7 @@ impl DecksService for Backend {
.map(Into::into)
}

fn rename_deck(&self, input: pb::RenameDeckIn) -> Result<pb::OpChanges> {
fn rename_deck(&self, input: pb::RenameDeckRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name))
.map(Into::into)
}
@@ -185,7 +188,7 @@ impl DecksService for Backend {
Ok(FilteredSearchOrder::labels(&self.tr).into())
}

fn set_deck_collapsed(&self, input: pb::SetDeckCollapsedIn) -> Result<pb::OpChanges> {
fn set_deck_collapsed(&self, input: pb::SetDeckCollapsedRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
})

@@ -14,7 +14,7 @@ use crate::{
};

impl I18nService for Backend {
fn translate_string(&self, input: pb::TranslateStringIn) -> Result<pb::String> {
fn translate_string(&self, input: pb::TranslateStringRequest) -> Result<pb::String> {
let args = build_fluent_args(input.args);

Ok(self
@@ -27,8 +27,8 @@ impl I18nService for Backend {
.into())
}

fn format_timespan(&self, input: pb::FormatTimespanIn) -> Result<pb::String> {
fn format_timespan(&self, input: pb::FormatTimespanRequest) -> Result<pb::String> {
use pb::format_timespan_in::Context;
use pb::format_timespan_request::Context;
Ok(match input.context() {
Context::Precise => time_span(input.seconds, &self.tr, true),
Context::Intervals => time_span(input.seconds, &self.tr, false),
@@ -37,7 +37,7 @@ impl I18nService for Backend {
.into())
}

fn i18n_resources(&self, input: pb::I18nResourcesIn) -> Result<pb::Json> {
fn i18n_resources(&self, input: pb::I18nResourcesRequest) -> Result<pb::Json> {
serde_json::to_vec(&self.tr.resources_for_js(&input.modules))
.map(Into::into)
.map_err(Into::into)

@@ -13,7 +13,7 @@ impl MediaService for Backend {
// media
//-----------------------------------------------

fn check_media(&self, _input: pb::Empty) -> Result<pb::CheckMediaOut> {
fn check_media(&self, _input: pb::Empty) -> Result<pb::CheckMediaResponse> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@@ -25,7 +25,7 @@ impl MediaService for Backend {

let report = checker.summarize_output(&mut output);

Ok(pb::CheckMediaOut {
Ok(pb::CheckMediaResponse {
unused: output.unused,
missing: output.missing,
report,
@@ -35,7 +35,7 @@ impl MediaService for Backend {
})
}

fn trash_media_files(&self, input: pb::TrashMediaFilesIn) -> Result<pb::Empty> {
fn trash_media_files(&self, input: pb::TrashMediaFilesRequest) -> Result<pb::Empty> {
self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx();
@@ -44,7 +44,7 @@ impl MediaService for Backend {
.map(Into::into)
}

fn add_media_file(&self, input: pb::AddMediaFileIn) -> Result<pb::String> {
fn add_media_file(&self, input: pb::AddMediaFileRequest) -> Result<pb::String> {
self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx();

@@ -19,18 +19,21 @@ impl NotesService for Backend {
})
}

fn add_note(&self, input: pb::AddNoteIn) -> Result<pb::AddNoteOut> {
fn add_note(&self, input: pb::AddNoteRequest) -> Result<pb::AddNoteResponse> {
self.with_col(|col| {
let mut note: Note = input.note.ok_or(AnkiError::NotFound)?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(pb::AddNoteOut {
Ok(pb::AddNoteResponse {
note_id: note.id.0,
changes: Some(changes.into()),
})
})
}

fn defaults_for_adding(&self, input: pb::DefaultsForAddingIn) -> Result<pb::DeckAndNotetype> {
fn defaults_for_adding(
&self,
input: pb::DefaultsForAddingRequest,
) -> Result<pb::DeckAndNotetype> {
self.with_col(|col| {
let home_deck: DeckId = input.home_deck_of_current_review_card.into();
col.defaults_for_adding(home_deck).map(Into::into)
@@ -46,7 +49,7 @@ impl NotesService for Backend {
})
}

fn update_note(&self, input: pb::UpdateNoteIn) -> Result<pb::OpChanges> {
fn update_note(&self, input: pb::UpdateNoteRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
let mut note: Note = input.note.ok_or(AnkiError::NotFound)?.into();
col.update_note_maybe_undoable(&mut note, !input.skip_undo_entry)
@@ -63,7 +66,7 @@ impl NotesService for Backend {
})
}

fn remove_notes(&self, input: pb::RemoveNotesIn) -> Result<pb::OpChangesWithCount> {
fn remove_notes(&self, input: pb::RemoveNotesRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
if !input.note_ids.is_empty() {
col.remove_notes(
@@ -87,17 +90,20 @@ impl NotesService for Backend {
})
}

fn cloze_numbers_in_note(&self, note: pb::Note) -> Result<pb::ClozeNumbersInNoteOut> {
fn cloze_numbers_in_note(&self, note: pb::Note) -> Result<pb::ClozeNumbersInNoteResponse> {
let mut set = HashSet::with_capacity(4);
for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set);
}
Ok(pb::ClozeNumbersInNoteOut {
Ok(pb::ClozeNumbersInNoteResponse {
numbers: set.into_iter().map(|n| n as u32).collect(),
})
}

fn after_note_updates(&self, input: pb::AfterNoteUpdatesIn) -> Result<pb::OpChangesWithCount> {
fn after_note_updates(
&self,
input: pb::AfterNoteUpdatesRequest,
) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.after_note_updates(
&to_note_ids(input.nids),
@@ -110,21 +116,21 @@ impl NotesService for Backend {

fn field_names_for_notes(
&self,
input: pb::FieldNamesForNotesIn,
input: pb::FieldNamesForNotesRequest,
) -> Result<pb::FieldNamesForNotesOut> {
) -> Result<pb::FieldNamesForNotesResponse> {
self.with_col(|col| {
let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
col.storage
.field_names_for_notes(&nids)
.map(|fields| pb::FieldNamesForNotesOut { fields })
.map(|fields| pb::FieldNamesForNotesResponse { fields })
})
}

fn note_fields_check(&self, input: pb::Note) -> Result<pb::NoteFieldsCheckOut> {
fn note_fields_check(&self, input: pb::Note) -> Result<pb::NoteFieldsCheckResponse> {
let note: Note = input.into();
self.with_col(|col| {
col.note_fields_check(&note)
.map(|r| pb::NoteFieldsCheckOut { state: r as i32 })
.map(|r| pb::NoteFieldsCheckResponse { state: r as i32 })
})
}

@@ -47,7 +47,10 @@ impl NotetypesService for Backend {
.map(Into::into)
}

fn add_or_update_notetype(&self, input: pb::AddOrUpdateNotetypeIn) -> Result<pb::NotetypeId> {
fn add_or_update_notetype(
&self,
input: pb::AddOrUpdateNotetypeRequest,
) -> Result<pb::NotetypeId> {
self.with_col(|col| {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into();
@@ -138,13 +141,13 @@ impl NotetypesService for Backend {
.map(Into::into)
}

fn get_aux_notetype_config_key(&self, input: pb::GetAuxConfigKeyIn) -> Result<pb::String> {
fn get_aux_notetype_config_key(&self, input: pb::GetAuxConfigKeyRequest) -> Result<pb::String> {
Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into())
}

fn get_aux_template_config_key(
&self,
input: pb::GetAuxTemplateConfigKeyIn,
input: pb::GetAuxTemplateConfigKeyRequest,
) -> Result<pb::String> {
self.with_col(|col| {
col.get_aux_template_config_key(
@@ -165,14 +168,14 @@ impl NotetypesService for Backend {

fn get_change_notetype_info(
&self,
input: pb::GetChangeNotetypeInfoIn,
input: pb::GetChangeNotetypeInfoRequest,
) -> Result<pb::ChangeNotetypeInfo> {
self.with_col(|col| {
col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into)
})
}
fn change_notetype(&self, input: pb::ChangeNotetypeIn) -> Result<pb::OpChanges> {
fn change_notetype(&self, input: pb::ChangeNotetypeRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into))
}
}
@@ -203,8 +206,8 @@ impl From<NotetypeChangeInfo> for pb::ChangeNotetypeInfo {
}
}

impl From<pb::ChangeNotetypeIn> for ChangeNotetypeInput {
impl From<pb::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: pb::ChangeNotetypeIn) -> Self {
fn from(i: pb::ChangeNotetypeRequest) -> Self {
ChangeNotetypeInput {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_newtype(NoteId),
@@ -231,9 +234,9 @@ impl From<pb::ChangeNotetypeIn> for ChangeNotetypeInput {
}
}

impl From<ChangeNotetypeInput> for pb::ChangeNotetypeIn {
impl From<ChangeNotetypeInput> for pb::ChangeNotetypeRequest {
fn from(i: ChangeNotetypeInput) -> Self {
pb::ChangeNotetypeIn {
pb::ChangeNotetypeRequest {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_iter().map(Into::into).collect(),
old_notetype_id: i.old_notetype_id.into(),

@@ -19,7 +19,7 @@ use crate::{
impl SchedulingService for Backend {
/// This behaves like _updateCutoff() in older code - it also unburies at the start of
/// a new day.
fn sched_timing_today(&self, _input: pb::Empty) -> Result<pb::SchedTimingTodayOut> {
fn sched_timing_today(&self, _input: pb::Empty) -> Result<pb::SchedTimingTodayResponse> {
self.with_col(|col| {
let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?;
@@ -33,11 +33,11 @@ impl SchedulingService for Backend {
}

/// Message rendering only, for old graphs.
fn studied_today_message(&self, input: pb::StudiedTodayMessageIn) -> Result<pb::String> {
fn studied_today_message(&self, input: pb::StudiedTodayMessageRequest) -> Result<pb::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
}

fn update_stats(&self, input: pb::UpdateStatsIn) -> Result<pb::Empty> {
fn update_stats(&self, input: pb::UpdateStatsRequest) -> Result<pb::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@@ -47,7 +47,7 @@ impl SchedulingService for Backend {
})
}

fn extend_limits(&self, input: pb::ExtendLimitsIn) -> Result<pb::Empty> {
fn extend_limits(&self, input: pb::ExtendLimitsRequest) -> Result<pb::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@@ -64,11 +64,11 @@ impl SchedulingService for Backend {
})
}

fn counts_for_deck_today(&self, input: pb::DeckId) -> Result<pb::CountsForDeckTodayOut> {
fn counts_for_deck_today(&self, input: pb::DeckId) -> Result<pb::CountsForDeckTodayResponse> {
self.with_col(|col| col.counts_for_deck_today(input.did.into()))
}

fn congrats_info(&self, _input: pb::Empty) -> Result<pb::CongratsInfoOut> {
fn congrats_info(&self, _input: pb::Empty) -> Result<pb::CongratsInfoResponse> {
self.with_col(|col| col.congrats_info())
}

@@ -77,7 +77,7 @@ impl SchedulingService for Backend {
self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into))
}

fn unbury_deck(&self, input: pb::UnburyDeckIn) -> Result<pb::OpChanges> {
fn unbury_deck(&self, input: pb::UnburyDeckRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
col.unbury_deck(input.deck_id.into(), input.mode())
.map(Into::into)
@@ -86,7 +86,7 @@ impl SchedulingService for Backend {

fn bury_or_suspend_cards(
&self,
input: pb::BuryOrSuspendCardsIn,
input: pb::BuryOrSuspendCardsRequest,
) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
let mode = input.mode();
@@ -108,7 +108,7 @@ impl SchedulingService for Backend {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
}

fn schedule_cards_as_new(&self, input: pb::ScheduleCardsAsNewIn) -> Result<pb::OpChanges> {
fn schedule_cards_as_new(&self, input: pb::ScheduleCardsAsNewRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
let cids = input.card_ids.into_newtype(CardId);
let log = input.log;
@@ -116,14 +116,14 @@ impl SchedulingService for Backend {
})
}

fn set_due_date(&self, input: pb::SetDueDateIn) -> Result<pb::OpChanges> {
fn set_due_date(&self, input: pb::SetDueDateRequest) -> Result<pb::OpChanges> {
let config = input.config_key.map(Into::into);
let days = input.days;
let cids = input.card_ids.into_newtype(CardId);
self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into))
}

fn sort_cards(&self, input: pb::SortCardsIn) -> Result<pb::OpChangesWithCount> {
fn sort_cards(&self, input: pb::SortCardsRequest) -> Result<pb::OpChangesWithCount> {
let cids = input.card_ids.into_newtype(CardId);
let (start, step, random, shift) = (
input.starting_from,
@@ -142,7 +142,7 @@ impl SchedulingService for Backend {
})
}

fn sort_deck(&self, input: pb::SortDeckIn) -> Result<pb::OpChangesWithCount> {
fn sort_deck(&self, input: pb::SortDeckRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.sort_deck_legacy(input.deck_id.into(), input.randomize)
.map(Into::into)
@@ -176,7 +176,7 @@ impl SchedulingService for Backend {
.map(Into::into)
}

fn get_queued_cards(&self, input: pb::GetQueuedCardsIn) -> Result<pb::QueuedCards> {
fn get_queued_cards(&self, input: pb::GetQueuedCardsRequest) -> Result<pb::QueuedCards> {
self.with_col(|col| {
col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
.map(Into::into)
@@ -184,9 +184,9 @@ impl SchedulingService for Backend {
}
}

impl From<crate::scheduler::timing::SchedTimingToday> for pb::SchedTimingTodayOut {
impl From<crate::scheduler::timing::SchedTimingToday> for pb::SchedTimingTodayResponse {
fn from(t: crate::scheduler::timing::SchedTimingToday) -> pb::SchedTimingTodayOut {
fn from(t: crate::scheduler::timing::SchedTimingToday) -> pb::SchedTimingTodayResponse {
pb::SchedTimingTodayOut {
pb::SchedTimingTodayResponse {
days_elapsed: t.days_elapsed,
next_day_at: t.next_day_at.0,
}

@@ -22,27 +22,27 @@ impl SearchService for Backend {
Ok(write_nodes(&node.into_node_list()).into())
}

fn search_cards(&self, input: pb::SearchIn) -> Result<pb::SearchOut> {
fn search_cards(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let cids = col.search_cards(&input.search, order)?;
Ok(pb::SearchOut {
Ok(pb::SearchResponse {
ids: cids.into_iter().map(|v| v.0).collect(),
})
})
}

fn search_notes(&self, input: pb::SearchIn) -> Result<pb::SearchOut> {
fn search_notes(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let nids = col.search_notes(&input.search, order)?;
Ok(pb::SearchOut {
Ok(pb::SearchResponse {
ids: nids.into_iter().map(|v| v.0).collect(),
})
})
}

fn join_search_nodes(&self, input: pb::JoinSearchNodesIn) -> Result<pb::String> {
fn join_search_nodes(&self, input: pb::JoinSearchNodesRequest) -> Result<pb::String> {
let sep = input.joiner().into();
let existing_nodes = {
let node: Node = input.existing_node.unwrap_or_default().try_into()?;
@@ -52,7 +52,7 @@ impl SearchService for Backend {
Ok(concatenate_searches(sep, existing_nodes, additional_node).into())
}

fn replace_search_node(&self, input: pb::ReplaceSearchNodeIn) -> Result<pb::String> {
fn replace_search_node(&self, input: pb::ReplaceSearchNodeRequest) -> Result<pb::String> {
let existing = {
let node = input.existing_node.unwrap_or_default().try_into()?;
if let Node::Group(nodes) = node {
@@ -65,7 +65,7 @@ impl SearchService for Backend {
Ok(replace_search_node(existing, replacement).into())
}

fn find_and_replace(&self, input: pb::FindAndReplaceIn) -> Result<pb::OpChangesWithCount> {
fn find_and_replace(&self, input: pb::FindAndReplaceRequest) -> Result<pb::OpChangesWithCount> {
let mut search = if input.regex {
input.search
} else {

@@ -11,7 +11,7 @@ impl StatsService for Backend {
.map(Into::into)
}

fn graphs(&self, input: pb::GraphsIn) -> Result<pb::GraphsOut> {
fn graphs(&self, input: pb::GraphsRequest) -> Result<pb::GraphsResponse> {
self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
}

@@ -31,39 +31,39 @@ pub(super) struct SyncState {
#[derive(Default, Debug)]
pub(super) struct RemoteSyncStatus {
pub last_check: TimestampSecs,
pub last_response: pb::sync_status_out::Required,
pub last_response: pb::sync_status_response::Required,
}

impl RemoteSyncStatus {
pub(super) fn update(&mut self, required: pb::sync_status_out::Required) {
pub(super) fn update(&mut self, required: pb::sync_status_response::Required) {
self.last_check = TimestampSecs::now();
self.last_response = required
}
}

impl From<SyncOutput> for pb::SyncCollectionOut {
impl From<SyncOutput> for pb::SyncCollectionResponse {
fn from(o: SyncOutput) -> Self {
pb::SyncCollectionOut {
pb::SyncCollectionResponse {
host_number: o.host_number,
server_message: o.server_message,
required: match o.required {
SyncActionRequired::NoChanges => {
pb::sync_collection_out::ChangesRequired::NoChanges as i32
pb::sync_collection_response::ChangesRequired::NoChanges as i32
}
SyncActionRequired::FullSyncRequired {
upload_ok,
download_ok,
} => {
if !upload_ok {
pb::sync_collection_out::ChangesRequired::FullDownload as i32
pb::sync_collection_response::ChangesRequired::FullDownload as i32
} else if !download_ok {
pb::sync_collection_out::ChangesRequired::FullUpload as i32
pb::sync_collection_response::ChangesRequired::FullUpload as i32
} else {
pb::sync_collection_out::ChangesRequired::FullSync as i32
pb::sync_collection_response::ChangesRequired::FullSync as i32
}
}
SyncActionRequired::NormalSyncRequired => {
pb::sync_collection_out::ChangesRequired::NormalSync as i32
pb::sync_collection_response::ChangesRequired::NormalSync as i32
}
},
}
@@ -104,15 +104,15 @@ impl SyncService for Backend {
self.with_col(|col| col.before_upload().map(Into::into))
}

fn sync_login(&self, input: pb::SyncLoginIn) -> Result<pb::SyncAuth> {
fn sync_login(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
self.sync_login_inner(input)
}

fn sync_status(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusOut> {
fn sync_status(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
self.sync_status_inner(input)
}

fn sync_collection(&self, input: pb::SyncAuth) -> Result<pb::SyncCollectionOut> {
fn sync_collection(&self, input: pb::SyncAuth) -> Result<pb::SyncCollectionResponse> {
self.sync_collection_inner(input)
}

@@ -126,7 +126,7 @@ impl SyncService for Backend {
Ok(().into())
}

fn sync_server_method(&self, input: pb::SyncServerMethodIn) -> Result<pb::Json> {
fn sync_server_method(&self, input: pb::SyncServerMethodRequest) -> Result<pb::Json> {
let req = SyncRequest::from_method_and_data(input.method(), input.data)?;
self.sync_server_method_inner(req).map(Into::into)
}
@@ -221,7 +221,7 @@ impl Backend {
}
}

pub(super) fn sync_login_inner(&self, input: pb::SyncLoginIn) -> Result<pb::SyncAuth> {
pub(super) fn sync_login_inner(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
let (_guard, abort_reg) = self.sync_abort_handle()?;

let rt = self.runtime_handle();
@@ -237,10 +237,10 @@
})
}

pub(super) fn sync_status_inner(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusOut> {
pub(super) fn sync_status_inner(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
// any local changes mean we can skip the network round-trip
let req = self.with_col(|col| col.get_local_sync_status())?;
if req != pb::sync_status_out::Required::NoChanges {
if req != pb::sync_status_response::Required::NoChanges {
return Ok(req.into());
}

@@ -275,7 +275,7 @@ impl Backend {
pub(super) fn sync_collection_inner(
&self,
input: pb::SyncAuth,
) -> Result<pb::SyncCollectionOut> {
) -> Result<pb::SyncCollectionResponse> {
let (_guard, abort_reg) = self.sync_abort_handle()?;

let rt = self.runtime_handle();
@@ -367,7 +367,7 @@ impl Backend {
.unwrap()
.sync
.remote_sync_status
.update(pb::sync_status_out::Required::NoChanges);
.update(pb::sync_status_response::Required::NoChanges);
}
sync_result
}

@@ -11,10 +11,10 @@ use crate::{
prelude::*,
sync::{
http::{
ApplyChangesIn, ApplyChunkIn, ApplyGravesIn, HostKeyIn, HostKeyOut, MetaIn,
ApplyChangesRequest, ApplyChunkRequest, ApplyGravesRequest, HostKeyRequest,
SanityCheckIn, StartIn, SyncRequest,
HostKeyResponse, MetaRequest, SanityCheckRequest, StartRequest, SyncRequest,
},
Chunk, Graves, LocalServer, SanityCheckOut, SanityCheckStatus, SyncMeta, SyncServer,
Chunk, Graves, LocalServer, SanityCheckResponse, SanityCheckStatus, SyncMeta, SyncServer,
UnchunkedChanges, SYNC_VERSION_MAX, SYNC_VERSION_MIN,
},
};
@@ -39,13 +39,13 @@ impl Backend {
}

/// Gives out a dummy hkey - auth should be implemented at a higher layer.
fn host_key(&self, _input: HostKeyIn) -> Result<HostKeyOut> {
fn host_key(&self, _input: HostKeyRequest) -> Result<HostKeyResponse> {
Ok(HostKeyOut {
Ok(HostKeyResponse {
key: "unimplemented".into(),
})
}

fn meta(&self, input: MetaIn) -> Result<SyncMeta> {
fn meta(&self, input: MetaRequest) -> Result<SyncMeta> {
if input.sync_version < SYNC_VERSION_MIN || input.sync_version > SYNC_VERSION_MAX {
return Ok(SyncMeta {
server_message: "Your Anki version is either too old, or too new.".into(),
@@ -86,7 +86,7 @@ impl Backend {
.ok_or_else(|| AnkiError::sync_error("", SyncErrorKind::SyncNotStarted))
}

fn start(&self, input: StartIn) -> Result<Graves> {
fn start(&self, input: StartRequest) -> Result<Graves> {
// place col into new server
let server = self.col_into_server()?;
let mut state_guard = self.state.lock().unwrap();
@@ -103,14 +103,14 @@ impl Backend {
})
}

fn apply_graves(&self, input: ApplyGravesIn) -> Result<()> {
fn apply_graves(&self, input: ApplyGravesRequest) -> Result<()> {
self.with_sync_server(|server| {
let rt = Runtime::new().unwrap();
rt.block_on(server.apply_graves(input.chunk))
})
}

fn apply_changes(&self, input: ApplyChangesIn) -> Result<UnchunkedChanges> {
fn apply_changes(&self, input: ApplyChangesRequest) -> Result<UnchunkedChanges> {
self.with_sync_server(|server| {
let rt = Runtime::new().unwrap();
rt.block_on(server.apply_changes(input.changes))
@@ -124,14 +124,14 @@ impl Backend {
})
}

fn apply_chunk(&self, input: ApplyChunkIn) -> Result<()> {
fn apply_chunk(&self, input: ApplyChunkRequest) -> Result<()> {
self.with_sync_server(|server| {
let rt = Runtime::new().unwrap();
rt.block_on(server.apply_chunk(input.chunk))
})
}

fn sanity_check(&self, input: SanityCheckIn) -> Result<SanityCheckOut> {
fn sanity_check(&self, input: SanityCheckRequest) -> Result<SanityCheckResponse> {
self.with_sync_server(|server| {
let rt = Runtime::new().unwrap();
rt.block_on(server.sanity_check(input.client))

@@ -27,7 +27,7 @@ impl TagsService for Backend {
self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into))
}

fn set_tag_collapsed(&self, input: pb::SetTagCollapsedIn) -> Result<pb::OpChanges> {
fn set_tag_collapsed(&self, input: pb::SetTagCollapsedRequest) -> Result<pb::OpChanges> {
self.with_col(|col| {
col.set_tag_collapsed(&input.name, input.collapsed)
.map(Into::into)
@@ -38,7 +38,7 @@ impl TagsService for Backend {
self.with_col(|col| col.tag_tree())
}

fn reparent_tags(&self, input: pb::ReparentTagsIn) -> Result<pb::OpChangesWithCount> {
fn reparent_tags(&self, input: pb::ReparentTagsRequest) -> Result<pb::OpChangesWithCount> {
let source_tags = input.tags;
let target_tag = if input.new_parent.is_empty() {
None
@@ -49,19 +49,19 @@ impl TagsService for Backend {
.map(Into::into)
}

fn rename_tags(&self, input: pb::RenameTagsIn) -> Result<pb::OpChangesWithCount> {
fn rename_tags(&self, input: pb::RenameTagsRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix))
.map(Into::into)
}

fn add_note_tags(&self, input: pb::NoteIdsAndTagsIn) -> Result<pb::OpChangesWithCount> {
fn add_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
})
}

fn remove_note_tags(&self, input: pb::NoteIdsAndTagsIn) -> Result<pb::OpChangesWithCount> {
fn remove_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
@@ -70,7 +70,7 @@ impl TagsService for Backend {

fn find_and_replace_tag(
&self,
input: pb::FindAndReplaceTagIn,
input: pb::FindAndReplaceTagRequest,
) -> Result<pb::OpChangesWithCount> {
self.with_col(|col| {
col.find_and_replace_tag(

@@ -6,7 +6,7 @@ pub(crate) mod undo;
mod update;

pub use schema11::{DeckConfSchema11, NewCardOrderSchema11};
pub use update::UpdateDeckConfigsIn;
pub use update::UpdateDeckConfigsRequest;

pub use crate::backend_proto::deck_config::{
config::{

@@ -15,7 +15,7 @@ use crate::{
};

#[derive(Debug, Clone)]
pub struct UpdateDeckConfigsIn {
pub struct UpdateDeckConfigsRequest {
pub target_deck_id: DeckId,
/// Deck will be set to last provided deck config.
pub configs: Vec<DeckConfig>,
@@ -43,7 +43,7 @@ impl Collection {
}

/// Information required for the deck options screen.
pub fn update_deck_configs(&mut self, input: UpdateDeckConfigsIn) -> Result<OpOutput<()>> {
pub fn update_deck_configs(&mut self, input: UpdateDeckConfigsRequest) -> Result<OpOutput<()>> {
self.transact(Op::UpdateDeckConfig, |col| {
col.update_deck_configs_inner(input)
})
@@ -106,7 +106,7 @@ impl Collection {
.collect())
}

fn update_deck_configs_inner(&mut self, mut input: UpdateDeckConfigsIn) -> Result<()> {
fn update_deck_configs_inner(&mut self, mut input: UpdateDeckConfigsRequest) -> Result<()> {
if input.configs.is_empty() {
return Err(AnkiError::invalid_input("config not provided"));
}
@@ -219,7 +219,7 @@ mod test {

// if nothing changed, no changes should be made
let output = col.get_deck_configs_for_update(DeckId(1))?;
let mut input = UpdateDeckConfigsIn {
let mut input = UpdateDeckConfigsRequest {
target_deck_id: DeckId(1),
configs: output
.all_config

@@ -44,11 +44,11 @@ impl Collection {
     pub(crate) fn counts_for_deck_today(
         &mut self,
         did: DeckId,
-    ) -> Result<pb::CountsForDeckTodayOut> {
+    ) -> Result<pb::CountsForDeckTodayResponse> {
         let today = self.current_due_day(0)?;
         let mut deck = self.storage.get_deck(did)?.ok_or(AnkiError::NotFound)?;
         deck.reset_stats_if_day_changed(today);
-        Ok(pb::CountsForDeckTodayOut {
+        Ok(pb::CountsForDeckTodayResponse {
             new: deck.common.new_studied,
             review: deck.common.review_studied,
         })
@@ -23,7 +23,7 @@ impl Collection {
         &mut self,
         today: u32,
         usn: Usn,
-        input: pb::UpdateStatsIn,
+        input: pb::UpdateStatsRequest,
     ) -> Result<()> {
         let did = input.deck_id.into();
         let mutator = |c: &mut DeckCommon| {
@@ -13,7 +13,7 @@ use super::{
     limits::{remaining_limits_map, RemainingLimits},
     DueCounts,
 };
-pub use crate::backend_proto::set_deck_collapsed_in::Scope as DeckCollapseScope;
+pub use crate::backend_proto::set_deck_collapsed_request::Scope as DeckCollapseScope;
 use crate::{
     backend_proto::DeckTreeNode, config::SchedulerVersion, ops::OpOutput, prelude::*, undo::Op,
 };
@@ -14,7 +14,7 @@ use num_integer::Integer;

 use crate::{
     backend_proto as pb,
-    backend_proto::note_fields_check_out::State as NoteFieldsState,
+    backend_proto::note_fields_check_response::State as NoteFieldsState,
     cloze::contains_cloze,
     decks::DeckId,
     define_newtype,
@@ -320,7 +320,7 @@ impl Collection {
         self.update_deck_stats(
             updater.timing.days_elapsed,
             usn,
-            backend_proto::UpdateStatsIn {
+            backend_proto::UpdateStatsRequest {
                 deck_id: updater.deck.id.0,
                 new_delta,
                 review_delta,
@@ -4,7 +4,8 @@
 use super::timing::SchedTimingToday;
 use crate::{
     backend_proto::{
-        bury_or_suspend_cards_in::Mode as BuryOrSuspendMode, unbury_deck_in::Mode as UnburyDeckMode,
+        bury_or_suspend_cards_request::Mode as BuryOrSuspendMode,
+        unbury_deck_request::Mode as UnburyDeckMode,
     },
     card::CardQueue,
     config::SchedulerVersion,
@@ -14,7 +14,7 @@ pub(crate) struct CongratsInfo {
 }

 impl Collection {
-    pub fn congrats_info(&mut self) -> Result<pb::CongratsInfoOut> {
+    pub fn congrats_info(&mut self) -> Result<pb::CongratsInfoResponse> {
         let did = self.get_current_deck_id();
         let deck = self.get_deck(did)?.ok_or(AnkiError::NotFound)?;
         let today = self.timing_today()?.days_elapsed;
@@ -25,7 +25,7 @@ impl Collection {
             - self.learn_ahead_secs() as i64
             - TimestampSecs::now().0)
             .max(0) as u32;
-        Ok(pb::CongratsInfoOut {
+        Ok(pb::CongratsInfoResponse {
             learn_remaining: info.learn_count,
             review_remaining: info.review_remaining,
             new_remaining: info.new_remaining,
@@ -49,7 +49,7 @@ mod test {
         let info = col.congrats_info().unwrap();
         assert_eq!(
             info,
-            crate::backend_proto::CongratsInfoOut {
+            crate::backend_proto::CongratsInfoResponse {
                 learn_remaining: 0,
                 review_remaining: false,
                 new_remaining: false,
@@ -14,13 +14,13 @@ impl Collection {
         &mut self,
         search: &str,
         days: u32,
-    ) -> Result<pb::GraphsOut> {
+    ) -> Result<pb::GraphsResponse> {
         self.search_cards_into_table(search, SortMode::NoOrder)?;
         let all = search.trim().is_empty();
         self.graph_data(all, days)
     }

-    fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::GraphsOut> {
+    fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::GraphsResponse> {
         let timing = self.timing_today()?;
         let revlog_start = if days > 0 {
             timing
@@ -43,7 +43,7 @@ impl Collection {

         self.storage.clear_searched_cards_table()?;

-        Ok(pb::GraphsOut {
+        Ok(pb::GraphsResponse {
             cards: cards.into_iter().map(Into::into).collect(),
             revlog,
             days_elapsed: timing.days_elapsed,
@@ -6,19 +6,19 @@ use std::{fs, path::PathBuf};
 use serde::{Deserialize, Serialize};

 use super::{Chunk, Graves, SanityCheckCounts, UnchunkedChanges};
-use crate::{backend_proto::sync_server_method_in::Method, prelude::*};
+use crate::{backend_proto::sync_server_method_request::Method, prelude::*};
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "camelCase")]
 pub enum SyncRequest {
-    HostKey(HostKeyIn),
-    Meta(MetaIn),
-    Start(StartIn),
-    ApplyGraves(ApplyGravesIn),
-    ApplyChanges(ApplyChangesIn),
+    HostKey(HostKeyRequest),
+    Meta(MetaRequest),
+    Start(StartRequest),
+    ApplyGraves(ApplyGravesRequest),
+    ApplyChanges(ApplyChangesRequest),
     Chunk,
-    ApplyChunk(ApplyChunkIn),
+    ApplyChunk(ApplyChunkRequest),
     #[serde(rename = "sanityCheck2")]
-    SanityCheck(SanityCheckIn),
+    SanityCheck(SanityCheckRequest),
     Finish,
     Abort,
     #[serde(rename = "upload")]
@@ -73,19 +73,19 @@ impl SyncRequest {
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct HostKeyIn {
+pub struct HostKeyRequest {
     #[serde(rename = "u")]
     pub username: String,
     #[serde(rename = "p")]
     pub password: String,
 }
 #[derive(Serialize, Deserialize, Debug)]
-pub struct HostKeyOut {
+pub struct HostKeyResponse {
     pub key: String,
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct MetaIn {
+pub struct MetaRequest {
     #[serde(rename = "v")]
     pub sync_version: u8,
     #[serde(rename = "cv")]
@@ -93,7 +93,7 @@ pub struct MetaIn {
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct StartIn {
+pub struct StartRequest {
     #[serde(rename = "minUsn")]
     pub client_usn: Usn,
     #[serde(rename = "lnewer")]
@@ -104,21 +104,21 @@ pub struct StartIn {
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct ApplyGravesIn {
+pub struct ApplyGravesRequest {
     pub chunk: Graves,
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct ApplyChangesIn {
+pub struct ApplyChangesRequest {
     pub changes: UnchunkedChanges,
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct ApplyChunkIn {
+pub struct ApplyChunkRequest {
     pub chunk: Chunk,
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct SanityCheckIn {
+pub struct SanityCheckRequest {
     pub client: SanityCheckCounts,
 }
@@ -13,12 +13,12 @@ use tempfile::NamedTempFile;

 use super::{
     http::{
-        ApplyChangesIn, ApplyChunkIn, ApplyGravesIn, HostKeyIn, HostKeyOut, MetaIn, SanityCheckIn,
-        StartIn, SyncRequest,
+        ApplyChangesRequest, ApplyChunkRequest, ApplyGravesRequest, HostKeyRequest,
+        HostKeyResponse, MetaRequest, SanityCheckRequest, StartRequest, SyncRequest,
     },
     server::SyncServer,
-    Chunk, FullSyncProgress, Graves, SanityCheckCounts, SanityCheckOut, SyncMeta, UnchunkedChanges,
-    SYNC_VERSION_MAX,
+    Chunk, FullSyncProgress, Graves, SanityCheckCounts, SanityCheckResponse, SyncMeta,
+    UnchunkedChanges, SYNC_VERSION_MAX,
 };
 use crate::{error::SyncErrorKind, notes::guid, prelude::*, version::sync_client_version};

@@ -60,7 +60,7 @@ impl Timeouts {
 #[async_trait(?Send)]
 impl SyncServer for HttpSyncClient {
     async fn meta(&self) -> Result<SyncMeta> {
-        let input = SyncRequest::Meta(MetaIn {
+        let input = SyncRequest::Meta(MetaRequest {
             sync_version: SYNC_VERSION_MAX,
             client_version: sync_client_version().to_string(),
         });
@@ -73,7 +73,7 @@ impl SyncServer for HttpSyncClient {
         local_is_newer: bool,
         deprecated_client_graves: Option<Graves>,
     ) -> Result<Graves> {
-        let input = SyncRequest::Start(StartIn {
+        let input = SyncRequest::Start(StartRequest {
             client_usn,
             local_is_newer,
             deprecated_client_graves,
@@ -82,12 +82,12 @@ impl SyncServer for HttpSyncClient {
     }

     async fn apply_graves(&mut self, chunk: Graves) -> Result<()> {
-        let input = SyncRequest::ApplyGraves(ApplyGravesIn { chunk });
+        let input = SyncRequest::ApplyGraves(ApplyGravesRequest { chunk });
         self.json_request(input).await
     }

     async fn apply_changes(&mut self, changes: UnchunkedChanges) -> Result<UnchunkedChanges> {
-        let input = SyncRequest::ApplyChanges(ApplyChangesIn { changes });
+        let input = SyncRequest::ApplyChanges(ApplyChangesRequest { changes });
         self.json_request(input).await
     }

@@ -97,12 +97,12 @@ impl SyncServer for HttpSyncClient {
     }

     async fn apply_chunk(&mut self, chunk: Chunk) -> Result<()> {
-        let input = SyncRequest::ApplyChunk(ApplyChunkIn { chunk });
+        let input = SyncRequest::ApplyChunk(ApplyChunkRequest { chunk });
         self.json_request(input).await
     }

-    async fn sanity_check(&mut self, client: SanityCheckCounts) -> Result<SanityCheckOut> {
-        let input = SyncRequest::SanityCheck(SanityCheckIn { client });
+    async fn sanity_check(&mut self, client: SanityCheckCounts) -> Result<SanityCheckResponse> {
+        let input = SyncRequest::SanityCheck(SanityCheckRequest { client });
         self.json_request(input).await
     }

@@ -249,11 +249,11 @@ impl HttpSyncClient {
     }

     pub(crate) async fn login<S: Into<String>>(&mut self, username: S, password: S) -> Result<()> {
-        let input = SyncRequest::HostKey(HostKeyIn {
+        let input = SyncRequest::HostKey(HostKeyRequest {
             username: username.into(),
             password: password.into(),
         });
-        let output: HostKeyOut = self.json_request(input).await?;
+        let output: HostKeyResponse = self.json_request(input).await?;
         self.hkey = Some(output.key);

         Ok(())
@@ -16,7 +16,7 @@ use serde_tuple::Serialize_tuple;
 pub(crate) use server::{LocalServer, SyncServer};

 use crate::{
-    backend_proto::{sync_status_out, SyncStatusOut},
+    backend_proto::{sync_status_response, SyncStatusResponse},
     card::{Card, CardQueue, CardType},
     deckconfig::DeckConfSchema11,
     decks::DeckSchema11,
@@ -164,7 +164,7 @@ pub struct CardEntry {
 }

 #[derive(Serialize, Deserialize, Debug)]
-pub struct SanityCheckOut {
+pub struct SanityCheckResponse {
     pub status: SanityCheckStatus,
     #[serde(rename = "c", default, deserialize_with = "default_on_invalid")]
     pub client: Option<SanityCheckCounts>,
@@ -545,7 +545,7 @@ where
             self.col.log,
             "gathered local counts; waiting for server reply"
         );
-        let out: SanityCheckOut = self.remote.sanity_check(local_counts).await?;
+        let out: SanityCheckResponse = self.remote.sanity_check(local_counts).await?;
         debug!(self.col.log, "got server reply");
         if out.status != SanityCheckStatus::Ok {
             Err(AnkiError::sync_error(
@@ -609,20 +609,20 @@ pub(crate) async fn get_remote_sync_meta(auth: SyncAuth) -> Result<SyncMeta> {
 }

 impl Collection {
-    pub fn get_local_sync_status(&mut self) -> Result<sync_status_out::Required> {
+    pub fn get_local_sync_status(&mut self) -> Result<sync_status_response::Required> {
         let stamps = self.storage.get_collection_timestamps()?;
         let required = if stamps.schema_changed_since_sync() {
-            sync_status_out::Required::FullSync
+            sync_status_response::Required::FullSync
         } else if stamps.collection_changed_since_sync() {
-            sync_status_out::Required::NormalSync
+            sync_status_response::Required::NormalSync
         } else {
-            sync_status_out::Required::NoChanges
+            sync_status_response::Required::NoChanges
         };

         Ok(required)
     }

-    pub fn get_sync_status(&self, remote: SyncMeta) -> Result<sync_status_out::Required> {
+    pub fn get_sync_status(&self, remote: SyncMeta) -> Result<sync_status_response::Required> {
         Ok(self.sync_meta()?.compared_to_remote(remote).required.into())
     }

@@ -1173,18 +1173,18 @@ impl From<SyncState> for SyncOutput {
     }
 }

-impl From<sync_status_out::Required> for SyncStatusOut {
-    fn from(r: sync_status_out::Required) -> Self {
-        SyncStatusOut { required: r.into() }
+impl From<sync_status_response::Required> for SyncStatusResponse {
+    fn from(r: sync_status_response::Required) -> Self {
+        SyncStatusResponse { required: r.into() }
     }
 }

-impl From<SyncActionRequired> for sync_status_out::Required {
+impl From<SyncActionRequired> for sync_status_response::Required {
     fn from(r: SyncActionRequired) -> Self {
         match r {
-            SyncActionRequired::NoChanges => sync_status_out::Required::NoChanges,
-            SyncActionRequired::FullSyncRequired { .. } => sync_status_out::Required::FullSync,
-            SyncActionRequired::NormalSyncRequired => sync_status_out::Required::NormalSync,
+            SyncActionRequired::NoChanges => sync_status_response::Required::NoChanges,
+            SyncActionRequired::FullSyncRequired { .. } => sync_status_response::Required::FullSync,
+            SyncActionRequired::NormalSyncRequired => sync_status_response::Required::NormalSync,
         }
     }
 }
@@ -1415,7 +1415,7 @@ mod test {
         // and sync our changes
         let remote_meta = ctx.server().meta().await.unwrap();
         let out = col1.get_sync_status(remote_meta)?;
-        assert_eq!(out, sync_status_out::Required::NormalSync);
+        assert_eq!(out, sync_status_response::Required::NormalSync);

         let out = ctx.normal_sync(&mut col1).await;
         assert_eq!(out.required, SyncActionRequired::NoChanges);
@@ -11,7 +11,7 @@ use crate::{
     prelude::*,
     storage::open_and_check_sqlite_file,
     sync::{
-        Chunk, Graves, SanityCheckCounts, SanityCheckOut, SanityCheckStatus, SyncMeta,
+        Chunk, Graves, SanityCheckCounts, SanityCheckResponse, SanityCheckStatus, SyncMeta,
         UnchunkedChanges, Usn,
     },
 };
@@ -29,7 +29,7 @@ pub trait SyncServer {
         -> Result<UnchunkedChanges>;
     async fn chunk(&mut self) -> Result<Chunk>;
     async fn apply_chunk(&mut self, client_chunk: Chunk) -> Result<()>;
-    async fn sanity_check(&mut self, client: SanityCheckCounts) -> Result<SanityCheckOut>;
+    async fn sanity_check(&mut self, client: SanityCheckCounts) -> Result<SanityCheckResponse>;
     async fn finish(&mut self) -> Result<TimestampMillis>;
     async fn abort(&mut self) -> Result<()>;

@@ -148,10 +148,10 @@ impl SyncServer for LocalServer {
         self.col.apply_chunk(client_chunk, self.client_usn)
     }

-    async fn sanity_check(&mut self, mut client: SanityCheckCounts) -> Result<SanityCheckOut> {
+    async fn sanity_check(&mut self, mut client: SanityCheckCounts) -> Result<SanityCheckResponse> {
         client.counts = Default::default();
         let server = self.col.storage.sanity_check_info()?;
-        Ok(SanityCheckOut {
+        Ok(SanityCheckResponse {
             status: if client == server {
                 SanityCheckStatus::Ok
             } else {
@@ -29,9 +29,10 @@ export async function getChangeNotetypeInfo(
 }

 export async function changeNotetype(
-    input: pb.BackendProto.ChangeNotetypeIn
+    input: pb.BackendProto.ChangeNotetypeRequest
 ): Promise<void> {
-    const data: Uint8Array = pb.BackendProto.ChangeNotetypeIn.encode(input).finish();
+    const data: Uint8Array =
+        pb.BackendProto.ChangeNotetypeRequest.encode(input).finish();
     await postRequest("/_anki/changeNotetype", data);
     return;
 }
@@ -113,13 +114,13 @@ export class ChangeNotetypeInfoWrapper {
         );
     }

-    input(): pb.BackendProto.ChangeNotetypeIn {
-        return this.info.input as pb.BackendProto.ChangeNotetypeIn;
+    input(): pb.BackendProto.ChangeNotetypeRequest {
+        return this.info.input as pb.BackendProto.ChangeNotetypeRequest;
     }

     /// Pack changes back into input message for saving.
-    intoInput(): pb.BackendProto.ChangeNotetypeIn {
-        const input = this.info.input as pb.BackendProto.ChangeNotetypeIn;
+    intoInput(): pb.BackendProto.ChangeNotetypeRequest {
+        const input = this.info.input as pb.BackendProto.ChangeNotetypeRequest;
         input.newFields = nullToNegativeOne(this.fields);
         if (this.templates) {
             input.newTemplates = nullToNegativeOne(this.templates);
@@ -202,7 +203,7 @@ export class ChangeNotetypeState {
         await changeNotetype(this.dataForSaving());
     }

-    dataForSaving(): pb.BackendProto.ChangeNotetypeIn {
+    dataForSaving(): pb.BackendProto.ChangeNotetypeRequest {
         return this.info_.intoInput();
     }

@@ -7,7 +7,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     import { buildNextLearnMsg } from "./lib";
     import { bridgeLink } from "lib/bridgecommand";

-    export let info: pb.BackendProto.CongratsInfoOut;
+    export let info: pb.BackendProto.CongratsInfoResponse;
     import * as tr from "lib/i18n";

     const congrats = tr.schedulingCongratulationsFinished();
@@ -7,13 +7,13 @@ import { naturalUnit, unitAmount, unitName } from "lib/time";

 import * as tr from "lib/i18n";

-export async function getCongratsInfo(): Promise<pb.BackendProto.CongratsInfoOut> {
-    return pb.BackendProto.CongratsInfoOut.decode(
+export async function getCongratsInfo(): Promise<pb.BackendProto.CongratsInfoResponse> {
+    return pb.BackendProto.CongratsInfoResponse.decode(
         await postRequest("/_anki/congratsInfo", "")
     );
 }

-export function buildNextLearnMsg(info: pb.BackendProto.CongratsInfoOut): string {
+export function buildNextLearnMsg(info: pb.BackendProto.CongratsInfoResponse): string {
     const secsUntil = info.secsUntilNextLearn;
     // next learning card not due (/ until tomorrow)?
     if (secsUntil == 0 || secsUntil > 86_400) {
@@ -21,9 +21,10 @@ export async function getDeckOptionsInfo(
 }

 export async function saveDeckOptions(
-    input: pb.BackendProto.UpdateDeckConfigsIn
+    input: pb.BackendProto.UpdateDeckConfigsRequest
 ): Promise<void> {
-    const data: Uint8Array = pb.BackendProto.UpdateDeckConfigsIn.encode(input).finish();
+    const data: Uint8Array =
+        pb.BackendProto.UpdateDeckConfigsRequest.encode(input).finish();
     await postRequest("/_anki/updateDeckConfigs", data);
     return;
 }
@@ -190,7 +191,7 @@ export class DeckOptionsState {
         this.updateConfigList();
     }

-    dataForSaving(applyToChildren: boolean): pb.BackendProto.UpdateDeckConfigsIn {
+    dataForSaving(applyToChildren: boolean): pb.BackendProto.UpdateDeckConfigsRequest {
         const modifiedConfigsExcludingCurrent = this.configs
             .map((c) => c.config)
             .filter((c, idx) => {
@@ -204,7 +205,7 @@ export class DeckOptionsState {
             // current must come last, even if unmodified
             this.configs[this.selectedIdx].config,
         ];
-        return pb.BackendProto.UpdateDeckConfigsIn.create({
+        return pb.BackendProto.UpdateDeckConfigsRequest.create({
             targetDeckId: this.targetDeckId,
             removedConfigIds: this.removedConfigs,
             configs,
@@ -19,7 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, buildHistogram } from "./added";
    import type { GraphData } from "./added";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    import * as tr from "lib/i18n";
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;

@@ -14,7 +14,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { renderButtons } from "./buttons";
    import { defaultGraphBounds, GraphRange, RevlogRange } from "./graph-helpers";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    import * as tr from "lib/i18n";
    export let revlogRange: RevlogRange;

@@ -18,7 +18,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, renderCalendar } from "./calendar";
    import type { GraphData } from "./calendar";

-    export let sourceData: pb.BackendProto.GraphsOut;
+    export let sourceData: pb.BackendProto.GraphsResponse;
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;
    export let revlogRange: RevlogRange;
    import * as tr from "lib/i18n";
@@ -15,7 +15,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, renderCards } from "./card-counts";
    import type { GraphData, TableDatum } from "./card-counts";

-    export let sourceData: pb.BackendProto.GraphsOut;
+    export let sourceData: pb.BackendProto.GraphsResponse;
    import * as tr2 from "lib/i18n";
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;

@@ -17,7 +17,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, prepareData } from "./ease";
    import type { TableDatum, SearchEventMap } from "./graph-helpers";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;

    const dispatch = createEventDispatcher<SearchEventMap>();
@@ -20,7 +20,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, buildHistogram } from "./future-due";
    import type { GraphData } from "./future-due";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    import * as tr from "lib/i18n";
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;

@@ -15,7 +15,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { defaultGraphBounds, RevlogRange, GraphRange } from "./graph-helpers";
    import { renderHours } from "./hours";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    import * as tr from "lib/i18n";
    export let revlogRange: RevlogRange;
    let graphRange: GraphRange = GraphRange.Year;
@@ -23,7 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import type { IntervalGraphData } from "./intervals";
    import type { TableDatum, SearchEventMap } from "./graph-helpers";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    import * as tr from "lib/i18n";
    export let preferences: PreferenceStore<pb.BackendProto.GraphPreferences>;

@@ -19,7 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import { gatherData, renderReviews } from "./reviews";
    import type { GraphData } from "./reviews";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;
    export let revlogRange: RevlogRange;
    import * as tr from "lib/i18n";

@@ -10,7 +10,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    import type { TodayData } from "./today";
    import { gatherData } from "./today";

-    export let sourceData: pb.BackendProto.GraphsOut | null = null;
+    export let sourceData: pb.BackendProto.GraphsResponse | null = null;

    let todayData: TodayData | null = null;
    $: if (sourceData) {
@@ -21,8 +21,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
    async function getGraphData(
        search: string,
        days: number
-    ): Promise<pb.BackendProto.GraphsOut> {
-        return pb.BackendProto.GraphsOut.decode(
+    ): Promise<pb.BackendProto.GraphsResponse> {
+        return pb.BackendProto.GraphsResponse.decode(
            await postRequest("/_anki/graphData", JSON.stringify({ search, days }))
        );
    }
@@ -28,7 +28,7 @@ export interface GraphData {
     daysAdded: number[];
 }

-export function gatherData(data: pb.BackendProto.GraphsOut): GraphData {
+export function gatherData(data: pb.BackendProto.GraphsResponse): GraphData {
     const daysAdded = (data.cards as pb.BackendProto.Card[]).map((card) => {
         const elapsedSecs = (card.id as number) / 1000 - data.nextDayAtSecs;
         return Math.ceil(elapsedSecs / 86400);
@@ -39,7 +39,7 @@ export interface GraphData {
 const ReviewKind = pb.BackendProto.RevlogEntry.ReviewKind;

 export function gatherData(
-    data: pb.BackendProto.GraphsOut,
+    data: pb.BackendProto.GraphsResponse,
     range: GraphRange
 ): GraphData {
     const cutoff = millisecondCutoffForRange(range, data.nextDayAtSecs);
@@ -99,7 +99,7 @@ interface TotalCorrect {
 export function renderButtons(
     svgElem: SVGElement,
     bounds: GraphBounds,
-    origData: pb.BackendProto.GraphsOut,
+    origData: pb.BackendProto.GraphsResponse,
     range: GraphRange
 ): void {
     const sourceData = gatherData(origData, range);
@@ -53,7 +53,7 @@ type WeekdayType = pb.BackendProto.GraphPreferences.Weekday;
 const Weekday = pb.BackendProto.GraphPreferences.Weekday; /* enum */

 export function gatherData(
-    data: pb.BackendProto.GraphsOut,
+    data: pb.BackendProto.GraphsResponse,
     firstDayOfWeek: WeekdayType
 ): GraphData {
     const reviewCount = new Map<number, number>();
@@ -127,7 +127,7 @@ function countCards(
 }

 export function gatherData(
-    data: pb.BackendProto.GraphsOut,
+    data: pb.BackendProto.GraphsResponse,
     separateInactive: boolean
 ): GraphData {
     const totalCards = data.cards.length;
@@ -26,7 +26,7 @@ export interface GraphData {
     eases: number[];
 }

-export function gatherData(data: pb.BackendProto.GraphsOut): GraphData {
+export function gatherData(data: pb.BackendProto.GraphsResponse): GraphData {
     const eases = (data.cards as pb.BackendProto.Card[])
         .filter((c) => [CardType.Review, CardType.Relearn].includes(c.ctype))
         .map((c) => c.easeFactor / 10);
@@ -30,7 +30,7 @@ export interface GraphData {
     haveBacklog: boolean;
 }

-export function gatherData(data: pb.BackendProto.GraphsOut): GraphData {
+export function gatherData(data: pb.BackendProto.GraphsResponse): GraphData {
     const isLearning = (card: pb.BackendProto.Card): boolean =>
         [CardQueue.Learn, CardQueue.PreviewRepeat].includes(card.queue);

@@ -75,7 +75,7 @@ function binValue(d: Bin<Map<number, number>, number>): number {
     return sum(d, (d) => d[1]);
 }

-export interface FutureDueOut {
+export interface FutureDueResponse {
     histogramData: HistogramData | null;
     tableData: TableDatum[];
 }
@@ -96,7 +96,7 @@ export function buildHistogram(
     backlog: boolean,
     dispatch: SearchDispatch,
     browserLinksSupported: boolean
-): FutureDueOut {
+): FutureDueResponse {
     const output = { histogramData: null, tableData: [] };
     // get min/max
     const data = sourceData.dueCounts;
@@ -39,7 +39,7 @@ interface Hour {

 const ReviewKind = pb.BackendProto.RevlogEntry.ReviewKind;

-function gatherData(data: pb.BackendProto.GraphsOut, range: GraphRange): Hour[] {
+function gatherData(data: pb.BackendProto.GraphsResponse, range: GraphRange): Hour[] {
     const hours = [...Array(24)].map((_n, idx: number) => {
         return { hour: idx, totalCount: 0, correctCount: 0 } as Hour;
     });
@@ -74,7 +74,7 @@ function gatherData(data: pb.BackendProto.GraphsOut, range: GraphRange): Hour[]
 export function renderHours(
     svgElem: SVGElement,
     bounds: GraphBounds,
-    origData: pb.BackendProto.GraphsOut,
+    origData: pb.BackendProto.GraphsResponse,
     range: GraphRange
 ): void {
     const data = gatherData(origData, range);
@@ -36,7 +36,9 @@ export enum IntervalRange {
     All = 3,
 }

-export function gatherIntervalData(data: pb.BackendProto.GraphsOut): IntervalGraphData {
+export function gatherIntervalData(
+    data: pb.BackendProto.GraphsResponse
+): IntervalGraphData {
     const intervals = (data.cards as pb.BackendProto.Card[])
         .filter((c) => [CardType.Review, CardType.Relearn].includes(c.ctype))
         .map((c) => c.interval);
@@ -53,7 +53,7 @@ export interface GraphData {
 const ReviewKind = pb.BackendProto.RevlogEntry.ReviewKind;
 type BinType = Bin<Map<number, Reviews[]>, number>;

-export function gatherData(data: pb.BackendProto.GraphsOut): GraphData {
+export function gatherData(data: pb.BackendProto.GraphsResponse): GraphData {
     const reviewCount = new Map<number, Reviews>();
     const reviewTime = new Map<number, Reviews>();
     const empty = { mature: 0, young: 0, learn: 0, relearn: 0, early: 0 };
@@ -13,7 +13,7 @@ export interface TodayData {

 const ReviewKind = pb.BackendProto.RevlogEntry.ReviewKind;

-export function gatherData(data: pb.BackendProto.GraphsOut): TodayData {
+export function gatherData(data: pb.BackendProto.GraphsResponse): TodayData {
     let answerCount = 0;
     let answerMillis = 0;
     let correctCount = 0;