Backend Custom Study (#1600)
* Implement custom study on backend
* Switch frontend to backend custom study
* Skip typecheck for new pb classes
* Build tag search string on backend
  Also fixes escaping of special characters in tag names.
* `cram.cards` -> `cram.card_limit`
* Assign more meaningful names in `TagLimit`
* Broaden rustfmt glob
* Use `invalid_input()` helper
* Assign `FilteredDeckForUpdate` to temp var
* Implement `SearchBuilder`
* Rewrite `custom_study()` with `SearchBuilder`
* Replace match macros with `SearchBuilder`
* Remove `into_nodes_list` & `concatenate_searches`
Parent: ce966b13ca
Commit: 9aca778a93
34 changed files with 616 additions and 344 deletions
@@ -55,6 +55,7 @@ message BackendError {
EXISTS = 12;
FILTERED_DECK_ERROR = 13;
SEARCH_ERROR = 14;
CUSTOM_STUDY_ERROR = 15;
}

// localized error description suitable for displaying to the user

@@ -37,6 +37,7 @@ service SchedulerService {
rpc DescribeNextStates(NextCardStates) returns (generic.StringList);
rpc StateIsLeech(SchedulingState) returns (generic.Bool);
rpc UpgradeScheduler(generic.Empty) returns (generic.Empty);
rpc CustomStudy(CustomStudyRequest) returns (collection.OpChanges);
}

message SchedulingState {

@@ -217,3 +218,38 @@ message CardAnswer {
int64 answered_at_millis = 5;
uint32 milliseconds_taken = 6;
}

message CustomStudyRequest {
message Cram {
enum CramKind {
// due cards in due order
CRAM_KIND_DUE = 0;
// new cards in added order
CRAM_KIND_NEW = 1;
// review cards in random order
CRAM_KIND_REVIEW = 2;
// all cards in random order; no rescheduling
CRAM_KIND_ALL = 3;
}
CramKind kind = 1;
// the maximum number of cards
uint32 card_limit = 2;
// cards must match one of these, if non-empty
repeated string tags_to_include = 3;
// cards must not match any of these
repeated string tags_to_exclude = 4;
}
oneof value {
// increase new limit by x
int32 new_limit_delta = 1;
// increase review limit by x
int32 review_limit_delta = 2;
// repeat cards forgotten in the last x days
uint32 forgot_days = 3;
// review cards due in the next x days
uint32 review_ahead_days = 4;
// preview new cards added in the last x days
uint32 preview_days = 5;
Cram cram = 6;
}
}

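The new CustomStudyRequest message is the whole contract between the Qt dialog and the backend: exactly one oneof value is set, and Cram carries its own limit and tag filters. A minimal sketch of filling in a cram request with the prost-generated Rust types referenced later in this commit (the exact module path under backend_proto is an assumption):

// Sketch only: constructing the message defined above via the prost-generated
// types used in rslib (pb::CustomStudyRequest, custom_study_request::{Cram, Value}).
use crate::backend_proto::{
    self as pb,
    custom_study_request::{cram::CramKind, Cram, Value},
};

fn due_cram_request() -> pb::CustomStudyRequest {
    pb::CustomStudyRequest {
        // exactly one oneof member may be set
        value: Some(Value::Cram(Cram {
            kind: CramKind::Due as i32, // prost carries proto3 enums as i32
            card_limit: 100,
            tags_to_include: vec!["verbs".into()],
            tags_to_exclude: vec!["leech".into()],
        })),
    }
}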
@@ -18,7 +18,9 @@ ignored-classes=
SetDeckCollapsedRequest,
ConfigKey,
HelpPageLinkRequest,
StripHtmlRequest
StripHtmlRequest,
CustomStudyRequest,
Cram,

[REPORTS]
output-format=colorized

@@ -20,6 +20,7 @@ from anki.utils import from_json_bytes, to_json_bytes

from ..errors import (
BackendIOError,
CustomStudyError,
DBError,
ExistsError,
FilteredDeckError,

@@ -218,6 +219,9 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
elif val == kind.UNDO_EMPTY:
return UndoEmpty()

elif val == kind.CUSTOM_STUDY_ERROR:
return CustomStudyError(err.localized)

else:
# sadly we can't do exhaustiveness checking on protobuf enums
# assert_exhaustive(val)

@@ -40,6 +40,10 @@ class BackendIOError(LocalizedError):
pass


class CustomStudyError(LocalizedError):
pass


class DBError(LocalizedError):
pass

@@ -11,6 +11,7 @@ UnburyDeck = _base.UnburyDeck
CongratsInfo = _base.CongratsInfo
BuryOrSuspend = _base.BuryOrSuspend
FilteredDeckForUpdate = _base.FilteredDeckForUpdate
CustomStudyRequest = _base.CustomStudyRequest

# add aliases to the legacy pathnames
import anki.scheduler.v1

@@ -13,6 +13,7 @@ SchedTimingToday = scheduler_pb2.SchedTimingTodayResponse
CongratsInfo = scheduler_pb2.CongratsInfoResponse
UnburyDeck = scheduler_pb2.UnburyDeckRequest
BuryOrSuspend = scheduler_pb2.BuryOrSuspendCardsRequest
CustomStudyRequest = scheduler_pb2.CustomStudyRequest
FilteredDeckForUpdate = decks_pb2.FilteredDeckForUpdate


@@ -68,6 +69,9 @@ class SchedulerBase(DeprecatedNamesMixin):
info = self.congratulations_info()
return info.have_sched_buried or info.have_user_buried

def custom_study(self, request: CustomStudyRequest) -> OpChanges:
return self.col._backend.custom_study(request)

def extend_limits(self, new: int, rev: int) -> None:
did = self.col.decks.current()["id"]
self.col._backend.extend_limits(deck_id=did, new_delta=new, review_delta=rev)

@@ -15,6 +15,8 @@ ignored-classes=
CardAnswer,
QueuedCards,
ChangeNotetypeRequest,
CustomStudyRequest,
Cram,

[REPORTS]
output-format=colorized

@@ -2,10 +2,12 @@
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import aqt
from anki.collection import SearchNode
from anki.consts import *
from anki.scheduler import CustomStudyRequest
from aqt.operations.scheduling import custom_study
from aqt.qt import *
from aqt.utils import disable_help_button, showInfo, showWarning, tr
from aqt.taglimit import TagLimit
from aqt.utils import disable_help_button, tr

RADIO_NEW = 1
RADIO_REV = 2

@@ -120,98 +122,39 @@ class CustomStudy(QDialog):
self.radioIdx = idx

def accept(self) -> None:
f = self.form
i = self.radioIdx
spin = f.spin.value()
if i == RADIO_NEW:
self.deck["extendNew"] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extend_limits(spin, 0)
self.mw.reset()
QDialog.accept(self)
return
elif i == RADIO_REV:
self.deck["extendRev"] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extend_limits(0, spin)
self.mw.reset()
QDialog.accept(self)
return
elif i == RADIO_CRAM:
tags = self._getTags()
# the rest create a filtered deck
cur = self.mw.col.decks.by_name(tr.custom_study_custom_study_session())
if cur:
if not cur["dyn"]:
showInfo(tr.custom_study_must_rename_deck())
QDialog.accept(self)
return
else:
# safe to empty
self.mw.col.sched.empty_filtered_deck(cur["id"])
# reuse; don't delete as it may have children
dyn = cur
self.mw.col.decks.select(cur["id"])
request = CustomStudyRequest()
if self.radioIdx == RADIO_NEW:
request.new_limit_delta = self.form.spin.value()
elif self.radioIdx == RADIO_REV:
request.review_limit_delta = self.form.spin.value()
elif self.radioIdx == RADIO_FORGOT:
request.forgot_days = self.form.spin.value()
elif self.radioIdx == RADIO_AHEAD:
request.review_ahead_days = self.form.spin.value()
elif self.radioIdx == RADIO_PREVIEW:
request.preview_days = self.form.spin.value()
else:
did = self.mw.col.decks.new_filtered(tr.custom_study_custom_study_session())
dyn = self.mw.col.decks.get(did)
# and then set various options
if i == RADIO_FORGOT:
search = self.mw.col.build_search_string(
SearchNode(
rated=SearchNode.Rated(days=spin, rating=SearchNode.RATING_AGAIN)
)
)
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_RANDOM]
dyn["resched"] = False
elif i == RADIO_AHEAD:
search = self.mw.col.build_search_string(SearchNode(due_in_days=spin))
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_DUE]
dyn["resched"] = True
elif i == RADIO_PREVIEW:
search = self.mw.col.build_search_string(
SearchNode(card_state=SearchNode.CARD_STATE_NEW),
SearchNode(added_in_days=spin),
)
dyn["terms"][0] = [search, DYN_MAX_SIZE, DYN_OLDEST]
dyn["resched"] = False
elif i == RADIO_CRAM:
type = f.cardType.currentRow()
if type == TYPE_NEW:
terms = self.mw.col.build_search_string(
SearchNode(card_state=SearchNode.CARD_STATE_NEW)
)
ord = DYN_ADDED
dyn["resched"] = True
elif type == TYPE_DUE:
terms = self.mw.col.build_search_string(
SearchNode(card_state=SearchNode.CARD_STATE_DUE)
)
ord = DYN_DUE
dyn["resched"] = True
elif type == TYPE_REVIEW:
terms = self.mw.col.build_search_string(
SearchNode(negated=SearchNode(card_state=SearchNode.CARD_STATE_NEW))
)
ord = DYN_RANDOM
dyn["resched"] = True
request.cram.card_limit = self.form.spin.value()

tags = TagLimit.get_tags(self.mw, self)
request.cram.tags_to_include.extend(tags[0])
request.cram.tags_to_exclude.extend(tags[1])

cram_type = self.form.cardType.currentRow()
if cram_type == TYPE_NEW:
request.cram.kind = CustomStudyRequest.Cram.CRAM_KIND_NEW
elif cram_type == TYPE_DUE:
request.cram.kind = CustomStudyRequest.Cram.CRAM_KIND_DUE
elif cram_type == TYPE_REVIEW:
request.cram.kind = CustomStudyRequest.Cram.CRAM_KIND_REVIEW
else:
terms = ""
ord = DYN_RANDOM
dyn["resched"] = False
dyn["terms"][0] = [(terms + tags).strip(), spin, ord]
# add deck limit
dyn["terms"][0][0] = self.mw.col.build_search_string(
dyn["terms"][0][0], SearchNode(deck=self.deck["name"])
)
self.mw.col.decks.save(dyn)
# generate cards
self.created_custom_study = True
if not self.mw.col.sched.rebuild_filtered_deck(dyn["id"]):
showWarning(tr.custom_study_no_cards_matched_the_criteria_you())
return
self.mw.moveToState("overview")
QDialog.accept(self)
request.cram.kind = CustomStudyRequest.Cram.CRAM_KIND_ALL

# keep open on failure, as the cause was most likely an empty search
# result, which the user can remedy
custom_study(parent=self, request=request).success(
lambda _: QDialog.accept(self)
).run_in_background()

def reject(self) -> None:
if self.created_custom_study:

@@ -219,8 +162,3 @@ class CustomStudy(QDialog):
self.mw.col.decks.select(self.deck["id"])
# fixme: clean up the empty custom study deck
QDialog.reject(self)

def _getTags(self) -> str:
from aqt.taglimit import TagLimit

return TagLimit(self.mw, self).tags

@@ -17,7 +17,7 @@ from anki.collection import (
)
from anki.decks import DeckId
from anki.notes import NoteId
from anki.scheduler import FilteredDeckForUpdate, UnburyDeck
from anki.scheduler import CustomStudyRequest, FilteredDeckForUpdate, UnburyDeck
from anki.scheduler.v3 import CardAnswer
from anki.scheduler.v3 import Scheduler as V3Scheduler
from aqt.operations import CollectionOp

@@ -228,3 +228,11 @@ def answer_card(
return col.sched.answer_card(answer)

return CollectionOp(parent, answer_v3)


def custom_study(
*,
parent: QWidget,
request: CustomStudyRequest,
) -> CollectionOp[OpChanges]:
return CollectionOp(parent, lambda col: col.sched.custom_study(request))

@@ -1,23 +1,31 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/copyleft/agpl.html

from typing import Optional
from __future__ import annotations

from typing import List, Optional, Tuple

import aqt
from anki.lang import with_collapsed_whitespace
from aqt.customstudy import CustomStudy
from aqt.main import AnkiQt
from aqt.qt import *
from aqt.utils import disable_help_button, restoreGeom, saveGeom, showWarning, tr


class TagLimit(QDialog):
def __init__(self, mw: AnkiQt, parent: CustomStudy) -> None:
@staticmethod
def get_tags(
mw: AnkiQt, parent: aqt.customstudy.CustomStudy
) -> Tuple[List[str], List[str]]:
"""Get two lists of tags to include/exclude."""
return TagLimit(mw, parent).tags

def __init__(self, mw: AnkiQt, parent: aqt.customstudy.CustomStudy) -> None:
QDialog.__init__(self, parent, Qt.WindowType.Window)
self.tags: str = ""
self.tags: Tuple[List[str], List[str]] = ([], [])
self.tags_list: list[str] = []
self.mw = mw
self.parent_: Optional[CustomStudy] = parent
self.parent_: Optional[aqt.customstudy.CustomStudy] = parent
self.deck = self.parent_.deck
self.dialog = aqt.forms.taglimit.Ui_Dialog()
self.dialog.setupUi(self)

@@ -75,44 +83,35 @@ class TagLimit(QDialog):
self.dialog.inactiveList.selectionModel().select(idx, mode)

def reject(self) -> None:
self.tags = ""
QDialog.reject(self)

def accept(self) -> None:
include_tags = exclude_tags = []
# gather yes/no tags
yes = []
no = []
for c in range(self.dialog.activeList.count()):
# active
if self.dialog.activeCheck.isChecked():
item = self.dialog.activeList.item(c)
idx = self.dialog.activeList.indexFromItem(item)
if self.dialog.activeList.selectionModel().isSelected(idx):
yes.append(self.tags_list[c])
include_tags.append(self.tags_list[c])
# inactive
item = self.dialog.inactiveList.item(c)
idx = self.dialog.inactiveList.indexFromItem(item)
if self.dialog.inactiveList.selectionModel().isSelected(idx):
no.append(self.tags_list[c])
if (len(yes) + len(no)) > 100:
exclude_tags.append(self.tags_list[c])

if (len(include_tags) + len(exclude_tags)) > 100:
showWarning(with_collapsed_whitespace(tr.errors_100_tags_max()))
return

self.hide()
self.tags = (include_tags, exclude_tags)

# save in the deck for future invocations
self.deck["activeTags"] = yes
self.deck["inactiveTags"] = no
self.deck["activeTags"] = include_tags
self.deck["inactiveTags"] = exclude_tags
self.mw.col.decks.save(self.deck)
# build query string
self.tags = ""
if yes:
arr = []
for req in yes:
arr.append(f'tag:"{req}"')
self.tags += f"({' or '.join(arr)})"
if no:
arr = []
for req in no:
arr.append(f'-tag:"{req}"')
self.tags += f" {' '.join(arr)}"

saveGeom(self, "tagLimit")
QDialog.accept(self)

@@ -151,14 +151,14 @@ rust_test(
rustfmt_test(
name = "format_check",
srcs = glob([
"src/**/*.rs",
"**/*.rs",
]),
)

rustfmt_fix(
name = "format",
srcs = glob([
"src/**/*.rs",
"**/*.rs",
]),
)

@@ -105,10 +105,7 @@ pub fn write_backend_proto_rs() {
"Deck.Filtered.SearchTerm.Order",
"#[derive(strum::EnumIter)]",
)
.type_attribute(
"HelpPageLinkRequest.HelpPage",
"#[derive(strum::EnumIter)]",
)
.type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]")
.compile_protos(paths.as_slice(), &[proto_dir])
.unwrap();
}

@@ -0,0 +1 @@

@@ -85,4 +85,4 @@ mod test {
.join("\n - ")
}
}
}
}

@@ -33,6 +33,7 @@ impl AnkiError {
AnkiError::UndoEmpty => Kind::UndoEmpty,
AnkiError::MultipleNotetypesSelected => Kind::InvalidInput,
AnkiError::DatabaseCheckRequired => Kind::InvalidInput,
AnkiError::CustomStudyError(_) => Kind::CustomStudyError,
};

pb::BackendError {

@@ -182,6 +182,10 @@ impl SchedulerService for Backend {
.map(Into::into)
})
}

fn custom_study(&self, input: pb::CustomStudyRequest) -> Result<pb::OpChanges> {
self.with_col(|col| col.custom_study(input)).map(Into::into)
}
}

impl From<crate::scheduler::timing::SchedTimingToday> for pb::SchedTimingTodayResponse {

@@ -13,13 +13,13 @@ use crate::{
backend_proto::sort_order::Value as SortOrderProto,
browser_table::Column,
prelude::*,
search::{concatenate_searches, replace_search_node, write_nodes, Node, SortMode},
search::{replace_search_node, Node, SortMode},
};

impl SearchService for Backend {
fn build_search_string(&self, input: pb::SearchNode) -> Result<pb::String> {
let node: Node = input.try_into()?;
Ok(write_nodes(&node.into_node_list()).into())
Ok(SearchBuilder::from_root(node).write().into())
}

fn search_cards(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {

@@ -43,13 +43,18 @@ impl SearchService for Backend {
}

fn join_search_nodes(&self, input: pb::JoinSearchNodesRequest) -> Result<pb::String> {
let sep = input.joiner().into();
let existing_nodes = {
let node: Node = input.existing_node.unwrap_or_default().try_into()?;
node.into_node_list()
};
let additional_node = input.additional_node.unwrap_or_default().try_into()?;
Ok(concatenate_searches(sep, existing_nodes, additional_node).into())
let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?;
let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?;
let search = SearchBuilder::from_root(existing_node);

Ok(
match pb::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
pb::search_node::group::Joiner::And => search.and(additional_node),
pb::search_node::group::Joiner::Or => search.or(additional_node),
}
.write()
.into(),
)
}

fn replace_search_node(&self, input: pb::ReplaceSearchNodeRequest) -> Result<pb::String> {

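The join_search_nodes rewrite above relies on SearchBuilder::from_root, which unwraps a Node::Group so the additional node is appended at the top level of the existing search rather than nested under it; this is the behaviour the removed concatenate_searches helper documented (joining "c" onto "a AND b" must give "a AND b AND c", not "(a AND b) AND c"). A hedged sketch of that effect, with a hypothetical wrapper function:

// Sketch, not part of the commit: extend an existing search with one more node.
use crate::{prelude::*, search::Node};

fn extend_existing(existing: Node, additional: Node) -> String {
    // If `existing` is a Node::Group, from_root keeps its inner nodes, so
    // .and(additional) writes roughly "a b c" instead of "(a b) c".
    SearchBuilder::from_root(existing).and(additional).write()
}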
@@ -7,8 +7,7 @@ use crate::{
backend_proto as pb,
prelude::*,
search::{
parse_search, BoolSeparator, Node, PropertyKind, RatingKind, SearchNode, StateKind,
TemplateKind,
parse_search, Negated, Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind,
},
text::escape_anki_wildcards_for_search_node,
};

@@ -20,15 +19,9 @@ impl TryFrom<pb::SearchNode> for Node {
use pb::search_node::{group::Joiner, Filter, Flag};
Ok(if let Some(filter) = msg.filter {
match filter {
Filter::Tag(s) => {
Node::Search(SearchNode::Tag(escape_anki_wildcards_for_search_node(&s)))
}
Filter::Deck(s) => {
Node::Search(SearchNode::Deck(escape_anki_wildcards_for_search_node(&s)))
}
Filter::Note(s) => Node::Search(SearchNode::Notetype(
escape_anki_wildcards_for_search_node(&s),
)),
Filter::Tag(s) => SearchNode::from_tag_name(&s).into(),
Filter::Deck(s) => SearchNode::from_deck_name(&s).into(),
Filter::Note(s) => SearchNode::from_notetype_name(&s).into(),
Filter::Template(u) => {
Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
}

@@ -112,15 +105,6 @@ impl TryFrom<pb::SearchNode> for Node {
}
}

impl From<pb::search_node::group::Joiner> for BoolSeparator {
fn from(sep: pb::search_node::group::Joiner) -> Self {
match sep {
pb::search_node::group::Joiner::And => BoolSeparator::And,
pb::search_node::group::Joiner::Or => BoolSeparator::Or,
}
}
}

impl From<pb::search_node::Rating> for RatingKind {
fn from(r: pb::search_node::Rating) -> Self {
match r {

@@ -32,3 +32,25 @@ impl From<FilteredDeckError> for AnkiError {
AnkiError::FilteredDeckError(e)
}
}

#[derive(Debug, PartialEq)]
pub enum CustomStudyError {
NoMatchingCards,
ExistingDeck,
}

impl CustomStudyError {
pub fn localized_description(&self, tr: &I18n) -> String {
match self {
Self::NoMatchingCards => tr.custom_study_no_cards_matched_the_criteria_you(),
Self::ExistingDeck => tr.custom_study_must_rename_deck(),
}
.into()
}
}

impl From<CustomStudyError> for AnkiError {
fn from(e: CustomStudyError) -> Self {
AnkiError::CustomStudyError(e)
}
}

@@ -9,7 +9,7 @@ mod search;
use std::{fmt::Display, io};

pub use db::{DbError, DbErrorKind};
pub use filtered::FilteredDeckError;
pub use filtered::{CustomStudyError, FilteredDeckError};
pub use network::{NetworkError, NetworkErrorKind, SyncError, SyncErrorKind};
pub use search::{ParseError, SearchErrorKind};
use tempfile::PathPersistError;

@@ -41,6 +41,7 @@ pub enum AnkiError {
UndoEmpty,
MultipleNotetypesSelected,
DatabaseCheckRequired,
CustomStudyError(CustomStudyError),
}

impl Display for AnkiError {

@@ -94,6 +95,7 @@ impl AnkiError {
AnkiError::InvalidRegex(err) => format!("<pre>{}</pre>", err),
AnkiError::MultipleNotetypesSelected => tr.errors_multiple_notetypes_selected().into(),
AnkiError::DatabaseCheckRequired => tr.errors_please_check_database().into(),
AnkiError::CustomStudyError(err) => err.localized_description(tr),
AnkiError::IoError(_)
| AnkiError::JsonError(_)
| AnkiError::ProtoError(_)

@@ -222,7 +222,7 @@ impl Collection {
.ok_or(AnkiError::NotFound)?;

if self
.search_notes_unordered(match_all![note1.notetype_id, nids_node])?
.search_notes_unordered(SearchBuilder::from(note1.notetype_id).and(nids_node))?
.len()
!= note_ids.len()
{

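This hunk is the first of several that swap the match_all!/match_any! macros for explicit SearchBuilder chains; the same mechanical pattern repeats in the notetype and scheduler hunks below. A hedged sketch of the before/after shape, wrapped in a hypothetical helper function:

// Sketch of the migration pattern used throughout this commit.
use crate::{
    prelude::*,
    search::{SearchNode, StateKind},
};

fn new_cards_in_deck(deck: DeckId) -> SearchBuilder {
    // before: match_all![SearchNode::DeckIdWithoutChildren(deck), StateKind::New]
    // after:
    SearchBuilder::from(SearchNode::DeckIdWithoutChildren(deck)).and(StateKind::New)
}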
@@ -292,13 +292,12 @@ impl Collection {
usn: Usn,
) -> Result<(), AnkiError> {
if !map.removed.is_empty() {
let ords = Node::any(
map.removed
.iter()
.map(|o| TemplateKind::Ordinal(*o as u16))
.map(Into::into),
);
self.search_cards_into_table(match_all![nids, ords], SortMode::NoOrder)?;
let ords =
SearchBuilder::any(map.removed.iter().map(|o| TemplateKind::Ordinal(*o as u16)));
self.search_cards_into_table(
SearchBuilder::from(nids).and_join(&mut ords.group()),
SortMode::NoOrder,
)?;
for card in self.storage.all_searched_cards()? {
self.remove_card_and_add_grave_undoable(card, usn)?;
}

@@ -315,13 +314,15 @@ impl Collection {
usn: Usn,
) -> Result<(), AnkiError> {
if !map.remapped.is_empty() {
let ords = Node::any(
let mut ords = SearchBuilder::any(
map.remapped
.keys()
.map(|o| TemplateKind::Ordinal(*o as u16))
.map(Into::into),
.map(|o| TemplateKind::Ordinal(*o as u16)),
);
self.search_cards_into_table(match_all![nids, ords], SortMode::NoOrder)?;
self.search_cards_into_table(
SearchBuilder::from(nids).and_join(&mut ords),
SortMode::NoOrder,
)?;
for mut card in self.storage.all_searched_cards()? {
let original = card.clone();
card.template_idx =

@@ -8,7 +8,7 @@ use std::collections::HashMap;
use super::{CardGenContext, Notetype};
use crate::{
prelude::*,
search::{Node, SortMode, TemplateKind},
search::{SortMode, TemplateKind},
};

/// True if any ordinals added, removed or reordered.

@@ -143,14 +143,12 @@ impl Collection {

// remove any cards where the template was deleted
if !changes.removed.is_empty() {
let ords = Node::any(
changes
.removed
.into_iter()
.map(TemplateKind::Ordinal)
.map(Into::into),
);
self.search_cards_into_table(match_all![nt.id, ords], SortMode::NoOrder)?;
let mut ords =
SearchBuilder::any(changes.removed.into_iter().map(TemplateKind::Ordinal));
self.search_cards_into_table(
SearchBuilder::from(nt.id).and_join(&mut ords),
SortMode::NoOrder,
)?;
for card in self.storage.all_searched_cards()? {
self.remove_card_and_add_grave_undoable(card, usn)?;
}

@@ -159,15 +157,12 @@ impl Collection {

// update ordinals for cards with a repositioned template
if !changes.moved.is_empty() {
let ords = Node::any(
changes
.moved
.keys()
.cloned()
.map(TemplateKind::Ordinal)
.map(Into::into),
);
self.search_cards_into_table(match_all![nt.id, ords], SortMode::NoOrder)?;
let mut ords =
SearchBuilder::any(changes.moved.keys().cloned().map(TemplateKind::Ordinal));
self.search_cards_into_table(
SearchBuilder::from(nt.id).and_join(&mut ords),
SortMode::NoOrder,
)?;
for mut card in self.storage.all_searched_cards()? {
let original = card.clone();
card.template_idx = *changes.moved.get(&card.template_idx).unwrap();

@@ -14,6 +14,7 @@ pub enum Op {
Bury,
ChangeNotetype,
ClearUnusedTags,
CreateCustomStudy,
EmptyFilteredDeck,
FindAndReplace,
RebuildFilteredDeck,

@@ -53,6 +54,7 @@ impl Op {
Op::AddNote => tr.actions_add_note(),
Op::AnswerCard => tr.actions_answer_card(),
Op::Bury => tr.studying_bury(),
Op::CreateCustomStudy => tr.actions_custom_study(),
Op::RemoveDeck => tr.decks_delete_deck(),
Op::RemoveNote => tr.studying_delete_note(),
Op::RenameDeck => tr.actions_rename_deck(),

@@ -12,12 +12,11 @@ pub use crate::{
decks::{Deck, DeckId, DeckKind, NativeDeckName},
error::{AnkiError, Result},
i18n::I18n,
match_all, match_any,
notes::{Note, NoteId},
notetype::{Notetype, NotetypeId},
ops::{Op, OpChanges, OpOutput},
revlog::RevlogId,
search::TryIntoSearch,
search::{SearchBuilder, TryIntoSearch},
timestamp::{TimestampMillis, TimestampSecs},
types::Usn,
};

@@ -79,7 +79,7 @@ impl Collection {
};
self.transact(Op::UnburyUnsuspend, |col| {
col.search_cards_into_table(
match_all![SearchNode::DeckIdWithChildren(deck_id), state],
SearchBuilder::from(SearchNode::DeckIdWithChildren(deck_id)).and(state),
SortMode::NoOrder,
)?;
col.unsuspend_or_unbury_searched_cards()

rslib/src/scheduler/filtered/custom_study.rs (new file, 188 lines)
@@ -0,0 +1,188 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use super::FilteredDeckForUpdate;
use crate::{
backend_proto::{
self as pb,
custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue},
},
decks::{FilteredDeck, FilteredSearchOrder, FilteredSearchTerm},
error::{CustomStudyError, FilteredDeckError},
prelude::*,
search::{Negated, PropertyKind, RatingKind, SearchNode, StateKind},
};

impl Collection {
pub fn custom_study(&mut self, input: pb::CustomStudyRequest) -> Result<OpOutput<()>> {
self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input))
}
}

impl Collection {
fn custom_study_inner(&mut self, input: pb::CustomStudyRequest) -> Result<()> {
let current_deck = self.get_current_deck()?;

match input
.value
.ok_or_else(|| AnkiError::invalid_input("missing oneof value"))?
{
CustomStudyValue::NewLimitDelta(delta) => {
let today = self.current_due_day(0)?;
self.extend_limits(today, self.usn()?, current_deck.id, delta, 0)
}
CustomStudyValue::ReviewLimitDelta(delta) => {
let today = self.current_due_day(0)?;
self.extend_limits(today, self.usn()?, current_deck.id, 0, delta)
}
CustomStudyValue::ForgotDays(days) => {
self.create_custom_study_deck(forgot_config(current_deck.human_name(), days))
}
CustomStudyValue::ReviewAheadDays(days) => {
self.create_custom_study_deck(ahead_config(current_deck.human_name(), days))
}
CustomStudyValue::PreviewDays(days) => {
self.create_custom_study_deck(preview_config(current_deck.human_name(), days))
}
CustomStudyValue::Cram(cram) => {
self.create_custom_study_deck(cram_config(current_deck.human_name(), cram)?)
}
}
}

/// Reuse existing one or create new one if missing.
/// Guaranteed to be a filtered deck.
fn create_custom_study_deck(&mut self, config: FilteredDeck) -> Result<()> {
let mut id = DeckId(0);
let human_name = self.tr.custom_study_custom_study_session().to_string();

if let Some(did) = self.get_deck_id(&human_name)? {
if !self
.get_deck(did)?
.ok_or(AnkiError::NotFound)?
.is_filtered()
{
return Err(CustomStudyError::ExistingDeck.into());
}
id = did;
}

let deck = FilteredDeckForUpdate {
id,
human_name,
config,
};

self.add_or_update_filtered_deck_inner(deck)
.map(|_| ())
.map_err(|err| {
if err == AnkiError::FilteredDeckError(FilteredDeckError::SearchReturnedNoCards) {
CustomStudyError::NoMatchingCards.into()
} else {
err
}
})
}
}

fn custom_study_config(
reschedule: bool,
search: String,
order: FilteredSearchOrder,
limit: Option<u32>,
) -> FilteredDeck {
FilteredDeck {
reschedule,
search_terms: vec![FilteredSearchTerm {
search,
limit: limit.unwrap_or(99_999),
order: order as i32,
}],
delays: vec![],
preview_delay: 10,
}
}

fn forgot_config(deck_name: String, days: u32) -> FilteredDeck {
let search = SearchBuilder::from(SearchNode::Rated {
days,
ease: RatingKind::AnswerButton(1),
})
.and(SearchNode::from_deck_name(&deck_name))
.write();
custom_study_config(false, search, FilteredSearchOrder::Random, None)
}

fn ahead_config(deck_name: String, days: u32) -> FilteredDeck {
let search = SearchBuilder::from(SearchNode::Property {
operator: "<=".to_string(),
kind: PropertyKind::Due(days as i32),
})
.and(SearchNode::from_deck_name(&deck_name))
.write();
custom_study_config(true, search, FilteredSearchOrder::Due, None)
}

fn preview_config(deck_name: String, days: u32) -> FilteredDeck {
let search = SearchBuilder::from(StateKind::New)
.and(SearchNode::AddedInDays(days))
.and(SearchNode::from_deck_name(&deck_name))
.write();
custom_study_config(
false,
search,
FilteredSearchOrder::OldestReviewedFirst,
None,
)
}

fn cram_config(deck_name: String, cram: Cram) -> Result<FilteredDeck> {
let (reschedule, nodes, order) = match CramKind::from_i32(cram.kind).unwrap_or_default() {
CramKind::New => (
true,
SearchBuilder::from(StateKind::New),
FilteredSearchOrder::Added,
),
CramKind::Due => (
true,
SearchBuilder::from(StateKind::Due),
FilteredSearchOrder::Due,
),
CramKind::Review => (
true,
SearchBuilder::from(StateKind::New.negated()),
FilteredSearchOrder::Random,
),
CramKind::All => (false, SearchBuilder::new(), FilteredSearchOrder::Random),
};

let search = nodes
.and_join(&mut tags_to_nodes(
cram.tags_to_include,
cram.tags_to_exclude,
))
.and(SearchNode::from_deck_name(&deck_name))
.write();

Ok(custom_study_config(
reschedule,
search,
order,
Some(cram.card_limit),
))
}

fn tags_to_nodes(tags_to_include: Vec<String>, tags_to_exclude: Vec<String>) -> SearchBuilder {
let include_nodes = SearchBuilder::any(
tags_to_include
.iter()
.map(|tag| SearchNode::from_tag_name(tag)),
);
let mut exclude_nodes = SearchBuilder::all(
tags_to_exclude
.iter()
.map(|tag| SearchNode::from_tag_name(tag).negated()),
);

include_nodes.group().and_join(&mut exclude_nodes)
}

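cram_config() and tags_to_nodes() above compose the filtered-deck search from three parts: the state filter for the cram kind, a grouped tag filter, and the current deck. A rough sketch of the equivalent builder calls and the kind of string they produce (exact quoting and escaping is up to the writer module):

// Sketch, not part of the commit: a cram-style search for due cards tagged
// "a" or "b", excluding "leech", limited to the given deck.
use crate::{
    prelude::*,
    search::{Negated, SearchNode, StateKind},
};

fn cram_like_search(deck_name: &str) -> String {
    let mut tag_filter =
        SearchBuilder::any(["a", "b"].iter().copied().map(SearchNode::from_tag_name))
            .group()
            .and(SearchNode::from_tag_name("leech").negated());
    SearchBuilder::from(StateKind::Due)
        .and_join(&mut tag_filter)
        .and(SearchNode::from_deck_name(deck_name))
        .write()
    // roughly: is:due (tag:a OR tag:b) -tag:leech deck:<deck_name>
}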
@@ -2,6 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

mod card;
mod custom_study;

use crate::{
config::{ConfigKey, SchedulerVersion},

@@ -196,7 +196,7 @@ impl Collection {
usn: Usn,
) -> Result<usize> {
let cids = self.search_cards(
match_all![SearchNode::DeckIdWithoutChildren(deck), StateKind::New],
SearchBuilder::from(SearchNode::DeckIdWithoutChildren(deck)).and(StateKind::New),
SortMode::NoOrder,
)?;
self.sort_cards_inner(&cids, 1, 1, order.into(), false, usn)

rslib/src/search/builder.rs (new file, 213 lines)
@@ -0,0 +1,213 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::mem;

use itertools::Itertools;

use super::{writer::write_nodes, Node, SearchNode, StateKind, TemplateKind};
use crate::{prelude::*, text::escape_anki_wildcards_for_search_node};

pub trait Negated {
fn negated(self) -> Node;
}

impl<T: Into<Node>> Negated for T {
fn negated(self) -> Node {
let node: Node = self.into();
if let Node::Not(inner) = node {
*inner
} else {
Node::Not(Box::new(node))
}
}
}

/// Helper to programmatically build searches.
#[derive(Debug, PartialEq, Clone)]
pub struct SearchBuilder(Vec<Node>);

impl SearchBuilder {
pub fn new() -> Self {
Self(vec![])
}

/// Construct [SearchBuilder] with this [Node], or its inner [Node]s,
/// if it is a [Node::Group]
pub fn from_root(node: Node) -> Self {
match node {
Node::Group(nodes) => Self(nodes),
_ => Self(vec![node]),
}
}

/// Construct [SearchBuilder] where given [Node]s are joined by [Node::And].
pub fn all(iter: impl IntoIterator<Item = impl Into<Node>>) -> Self {
Self(Itertools::intersperse(iter.into_iter().map(Into::into), Node::And).collect())
}

/// Construct [SearchBuilder] where given [Node]s are joined by [Node::Or].
pub fn any(iter: impl IntoIterator<Item = impl Into<Node>>) -> Self {
Self(Itertools::intersperse(iter.into_iter().map(Into::into), Node::Or).collect())
}

pub fn is_empty(&self) -> bool {
self.0.is_empty()
}

pub fn len(&self) -> usize {
self.0.len()
}

pub fn and<N: Into<Node>>(mut self, node: N) -> Self {
if !self.is_empty() {
self.0.push(Node::And)
}
self.0.push(node.into());
self
}

pub fn or<N: Into<Node>>(mut self, node: N) -> Self {
if !self.is_empty() {
self.0.push(Node::Or)
}
self.0.push(node.into());
self
}

/// Wrap [Node]s in [Node::Group] if there is more than 1.
pub fn group(mut self) -> Self {
if self.len() > 1 {
self.0 = vec![Node::Group(mem::take(&mut self.0))];
}
self
}

/// Concatenate [Node]s of `other`, inserting [Node::And] if appropriate.
/// No implicit grouping is done.
pub fn and_join(mut self, other: &mut Self) -> Self {
if !(self.is_empty() || other.is_empty()) {
self.0.push(Node::And);
}
self.0.append(&mut other.0);
self
}

/// Concatenate [Node]s of `other`, inserting [Node::Or] if appropriate.
/// No implicit grouping is done.
pub fn or_join(mut self, other: &mut Self) -> Self {
if !(self.is_empty() || other.is_empty()) {
self.0.push(Node::Or);
}
self.0.append(&mut other.0);
self
}

pub fn write(&self) -> String {
write_nodes(&self.0)
}
}

impl<T: Into<Node>> From<T> for SearchBuilder {
fn from(node: T) -> Self {
Self(vec![node.into()])
}
}

impl TryIntoSearch for SearchBuilder {
fn try_into_search(self) -> Result<Node, AnkiError> {
Ok(self.group().0.remove(0))
}
}

impl Default for SearchBuilder {
fn default() -> Self {
Self::new()
}
}

impl Node {
/// If we're a group, return the contained elements.
/// If we're a single node, return ourselves in a one-element vec.
pub fn into_node_list(self) -> Vec<Node> {
if let Node::Group(nodes) = self {
nodes
} else {
vec![self]
}
}
}

impl SearchNode {
/// Construct [SearchNode] from an unescaped deck name.
pub fn from_deck_name(name: &str) -> Self {
Self::Deck(escape_anki_wildcards_for_search_node(name))
}

/// Construct [SearchNode] from an unescaped tag name.
pub fn from_tag_name(name: &str) -> Self {
Self::Tag(escape_anki_wildcards_for_search_node(name))
}

/// Construct [SearchNode] from an unescaped notetype name.
pub fn from_notetype_name(name: &str) -> Self {
Self::Notetype(escape_anki_wildcards_for_search_node(name))
}

/// Construct [SearchNode] from an unescaped template name.
pub fn from_template_name(name: &str) -> Self {
Self::CardTemplate(TemplateKind::Name(escape_anki_wildcards_for_search_node(
name,
)))
}
}

impl<T: Into<SearchNode>> From<T> for Node {
fn from(node: T) -> Self {
Self::Search(node.into())
}
}

impl From<NotetypeId> for SearchNode {
fn from(id: NotetypeId) -> Self {
SearchNode::NotetypeId(id)
}
}

impl From<TemplateKind> for SearchNode {
fn from(k: TemplateKind) -> Self {
SearchNode::CardTemplate(k)
}
}

impl From<NoteId> for SearchNode {
fn from(n: NoteId) -> Self {
SearchNode::NoteIds(format!("{}", n))
}
}

impl From<StateKind> for SearchNode {
fn from(k: StateKind) -> Self {
SearchNode::State(k)
}
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn negating() {
let node = Node::Search(SearchNode::UnqualifiedText("foo".to_string()));
let neg_node = Node::Not(Box::new(Node::Search(SearchNode::UnqualifiedText(
"foo".to_string(),
))));
assert_eq!(node.clone().negated(), neg_node);
assert_eq!(node.clone().negated().negated(), node);

assert_eq!(
StateKind::Due.negated(),
Node::Not(Box::new(Node::Search(SearchNode::State(StateKind::Due))))
)
}
}

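For orientation, a small usage sketch (not part of the commit) of how the builder, the From impls, and Negated combine; the indicated output assumes the writer renders And as a space and Or as " OR ", as its tests elsewhere in this diff do:

// Hypothetical helper; all types come from this file and the parser module.
fn example_searches() -> (String, String) {
    // deck + state, chained with .and()
    let due_in_deck = SearchBuilder::from(SearchNode::from_deck_name("German"))
        .and(StateKind::Due)
        .write(); // roughly: deck:German is:due

    // all() over already-negated nodes, mirroring tags_to_nodes() above
    let untagged_pair = SearchBuilder::all(
        ["marked", "leech"]
            .iter()
            .copied()
            .map(|t| SearchNode::from_tag_name(t).negated()),
    )
    .write(); // roughly: -tag:marked -tag:leech

    (due_in_deck, untagged_pair)
}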
@@ -1,18 +1,21 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

mod builder;
mod parser;
mod sqlwriter;
pub(crate) mod writer;

use std::borrow::Cow;

use rusqlite::{params_from_iter, types::FromSql};
use sqlwriter::{RequiredTable, SqlWriter};

pub use builder::{Negated, SearchBuilder};
pub use parser::{
parse as parse_search, Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind,
};
use rusqlite::{params_from_iter, types::FromSql};
use sqlwriter::{RequiredTable, SqlWriter};
pub use writer::{concatenate_searches, replace_search_node, write_nodes, BoolSeparator};
pub use writer::replace_search_node;

use crate::{
browser_table::Column,

@@ -1,7 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use itertools::Itertools;
use lazy_static::lazy_static;
use nom::{
branch::alt,

@@ -39,48 +38,6 @@ pub enum Node {
Search(SearchNode),
}

impl Node {
pub fn negated(self) -> Node {
if let Node::Not(inner) = self {
*inner
} else {
Node::Not(Box::new(self))
}
}

/// If we're a group, return the contained elements.
/// If we're a single node, return ourselves in an one-element vec.
pub fn into_node_list(self) -> Vec<Node> {
if let Node::Group(nodes) = self {
nodes
} else {
vec![self]
}
}

pub fn all(iter: impl IntoIterator<Item = Node>) -> Node {
Node::Group(Itertools::intersperse(iter.into_iter(), Node::And).collect())
}

pub fn any(iter: impl IntoIterator<Item = Node>) -> Node {
Node::Group(Itertools::intersperse(iter.into_iter(), Node::Or).collect())
}
}

#[macro_export]
macro_rules! match_all {
($($param:expr),+ $(,)?) => {
$crate::search::Node::all(vec![$($param.into()),+])
};
}

#[macro_export]
macro_rules! match_any {
($($param:expr),+ $(,)?) => {
$crate::search::Node::any(vec![$($param.into()),+])
};
}

#[derive(Debug, PartialEq, Clone)]
pub enum SearchNode {
// text without a colon

@@ -177,36 +134,6 @@ pub fn parse(input: &str) -> Result<Vec<Node>> {
}
}

impl From<SearchNode> for Node {
fn from(n: SearchNode) -> Self {
Node::Search(n)
}
}

impl From<NotetypeId> for Node {
fn from(id: NotetypeId) -> Self {
Node::Search(SearchNode::NotetypeId(id))
}
}

impl From<TemplateKind> for Node {
fn from(k: TemplateKind) -> Self {
Node::Search(SearchNode::CardTemplate(k))
}
}

impl From<NoteId> for Node {
fn from(n: NoteId) -> Self {
Node::Search(SearchNode::NoteIds(format!("{}", n)))
}
}

impl From<StateKind> for Node {
fn from(k: StateKind) -> Self {
Node::Search(SearchNode::State(k))
}
}

/// Zero or more nodes inside brackets, eg 'one OR two -three'.
/// Empty vec must be handled by caller.
fn group_inner(input: &str) -> IResult<Vec<Node>> {

@@ -1106,14 +1033,4 @@ mod test {
SearchErrorKind::InvalidNumber { .. }
));
}

#[test]
fn negating() {
let node = Node::Search(SearchNode::UnqualifiedText("foo".to_string()));
let neg_node = Node::Not(Box::new(Node::Search(SearchNode::UnqualifiedText(
"foo".to_string(),
))));
assert_eq!(node.clone().negated(), neg_node);
assert_eq!(node.clone().negated().negated(), node);
}
}

@@ -14,32 +14,6 @@ use crate::{
text::escape_anki_wildcards,
};

#[derive(Debug, PartialEq)]
pub enum BoolSeparator {
And,
Or,
}

/// Take an existing search, and AND/OR it with the provided additional search.
/// This is required because when the user has "a AND b" in an existing search and
/// wants to add "c", we want "a AND b AND c", not "(a AND b) AND C", which is what we'd
/// get if we tried to join the existing search string with a new SearchTerm on the
/// client side.
pub fn concatenate_searches(
sep: BoolSeparator,
mut existing: Vec<Node>,
additional: Node,
) -> String {
if !existing.is_empty() {
existing.push(match sep {
BoolSeparator::And => Node::And,
BoolSeparator::Or => Node::Or,
});
}
existing.push(additional);
write_nodes(&existing)
}

/// Given an existing parsed search, if the provided `replacement` is a single search node such
/// as a deck:xxx search, replace any instances of that search in `existing` with the new value.
/// Then return the possibly modified first search as a string.

@@ -65,7 +39,7 @@ pub fn replace_search_node(mut existing: Vec<Node>, replacement: Node) -> String
write_nodes(&existing)
}

pub fn write_nodes(nodes: &[Node]) -> String {
pub(super) fn write_nodes(nodes: &[Node]) -> String {
nodes.iter().map(write_node).collect()
}

@@ -234,42 +208,6 @@ mod test {
assert_eq!("prop:ease>1", normalize_search("prop:ease>1.0").unwrap());
}

#[test]
fn concatenating() {
assert_eq!(
concatenate_searches(
BoolSeparator::And,
vec![Node::Search(SearchNode::UnqualifiedText("foo".to_string()))],
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
),
"foo bar",
);
assert_eq!(
concatenate_searches(
BoolSeparator::Or,
vec![Node::Search(SearchNode::UnqualifiedText("foo".to_string()))],
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
),
"foo OR bar",
);
assert_eq!(
concatenate_searches(
BoolSeparator::Or,
vec![Node::Search(SearchNode::WholeCollection)],
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
),
"deck:* OR bar",
);
assert_eq!(
concatenate_searches(
BoolSeparator::Or,
vec![],
Node::Search(SearchNode::UnqualifiedText("bar".to_string()))
),
"bar",
);
}

#[test]
fn replacing() -> Result<()> {
assert_eq!(