mirror of
https://github.com/ankitects/anki.git
synced 2025-09-20 06:52:21 -04:00
switch search parser to using owned values
I was a bit too enthusiastic with using borrowed values in structs earlier on in the Rust port. In this case any performance gains are dwarfed by the cost of querying the DB, and using owned values here simplifies the code, and will make it easier to parse a fragment in the From<SearchTerm> impl.
This commit is contained in:
parent
8852359fa9
commit
242b4ea505
5 changed files with 52 additions and 59 deletions
|
@ -296,41 +296,32 @@ impl From<pb::DeckConfigId> for DeckConfID {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<pb::SearchTerm> for Node<'_> {
|
impl From<pb::SearchTerm> for Node {
|
||||||
fn from(msg: pb::SearchTerm) -> Self {
|
fn from(msg: pb::SearchTerm) -> Self {
|
||||||
use pb::search_term::group::Operator;
|
use pb::search_term::group::Operator;
|
||||||
use pb::search_term::Filter;
|
use pb::search_term::Filter;
|
||||||
use pb::search_term::Flag;
|
use pb::search_term::Flag;
|
||||||
if let Some(filter) = msg.filter {
|
if let Some(filter) = msg.filter {
|
||||||
match filter {
|
match filter {
|
||||||
Filter::Tag(s) => Node::Search(SearchNode::Tag(
|
Filter::Tag(s) => Node::Search(SearchNode::Tag(escape_anki_wildcards(&s))),
|
||||||
escape_anki_wildcards(&s).into_owned().into(),
|
Filter::Deck(s) => Node::Search(SearchNode::Deck(if s == "*" {
|
||||||
)),
|
|
||||||
Filter::Deck(s) => Node::Search(SearchNode::Deck(
|
|
||||||
if s == "*" {
|
|
||||||
s
|
s
|
||||||
} else {
|
} else {
|
||||||
escape_anki_wildcards(&s).into_owned()
|
escape_anki_wildcards(&s)
|
||||||
}
|
})),
|
||||||
.into(),
|
Filter::Note(s) => Node::Search(SearchNode::NoteType(escape_anki_wildcards(&s))),
|
||||||
)),
|
|
||||||
Filter::Note(s) => Node::Search(SearchNode::NoteType(
|
|
||||||
escape_anki_wildcards(&s).into_owned().into(),
|
|
||||||
)),
|
|
||||||
Filter::Template(u) => {
|
Filter::Template(u) => {
|
||||||
Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
|
Node::Search(SearchNode::CardTemplate(TemplateKind::Ordinal(u as u16)))
|
||||||
}
|
}
|
||||||
Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string().into())),
|
Filter::Nid(nid) => Node::Search(SearchNode::NoteIDs(nid.to_string())),
|
||||||
Filter::Nids(nids) => {
|
Filter::Nids(nids) => Node::Search(SearchNode::NoteIDs(nids.into_id_string())),
|
||||||
Node::Search(SearchNode::NoteIDs(nids.into_id_string().into()))
|
|
||||||
}
|
|
||||||
Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
|
Filter::Dupe(dupe) => Node::Search(SearchNode::Duplicates {
|
||||||
note_type_id: dupe.notetype_id.into(),
|
note_type_id: dupe.notetype_id.into(),
|
||||||
text: dupe.first_field.into(),
|
text: dupe.first_field,
|
||||||
}),
|
}),
|
||||||
Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
|
Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
|
||||||
field: escape_anki_wildcards(&s).into_owned().into(),
|
field: escape_anki_wildcards(&s),
|
||||||
text: "*".to_string().into(),
|
text: "*".to_string(),
|
||||||
is_re: false,
|
is_re: false,
|
||||||
}),
|
}),
|
||||||
Filter::Rated(rated) => Node::Search(SearchNode::Rated {
|
Filter::Rated(rated) => Node::Search(SearchNode::Rated {
|
||||||
|
|
|
@ -17,7 +17,6 @@ use nom::{
|
||||||
sequence::{preceded, separated_pair},
|
sequence::{preceded, separated_pair},
|
||||||
};
|
};
|
||||||
use regex::{Captures, Regex};
|
use regex::{Captures, Regex};
|
||||||
use std::borrow::Cow;
|
|
||||||
|
|
||||||
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
|
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
|
||||||
type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
|
type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
|
||||||
|
@ -31,52 +30,52 @@ fn parse_error(input: &str) -> nom::Err<ParseError<'_>> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub enum Node<'a> {
|
pub enum Node {
|
||||||
And,
|
And,
|
||||||
Or,
|
Or,
|
||||||
Not(Box<Node<'a>>),
|
Not(Box<Node>),
|
||||||
Group(Vec<Node<'a>>),
|
Group(Vec<Node>),
|
||||||
Search(SearchNode<'a>),
|
Search(SearchNode),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub enum SearchNode<'a> {
|
pub enum SearchNode {
|
||||||
// text without a colon
|
// text without a colon
|
||||||
UnqualifiedText(Cow<'a, str>),
|
UnqualifiedText(String),
|
||||||
// foo:bar, where foo doesn't match a term below
|
// foo:bar, where foo doesn't match a term below
|
||||||
SingleField {
|
SingleField {
|
||||||
field: Cow<'a, str>,
|
field: String,
|
||||||
text: Cow<'a, str>,
|
text: String,
|
||||||
is_re: bool,
|
is_re: bool,
|
||||||
},
|
},
|
||||||
AddedInDays(u32),
|
AddedInDays(u32),
|
||||||
EditedInDays(u32),
|
EditedInDays(u32),
|
||||||
CardTemplate(TemplateKind<'a>),
|
CardTemplate(TemplateKind),
|
||||||
Deck(Cow<'a, str>),
|
Deck(String),
|
||||||
DeckID(DeckID),
|
DeckID(DeckID),
|
||||||
NoteTypeID(NoteTypeID),
|
NoteTypeID(NoteTypeID),
|
||||||
NoteType(Cow<'a, str>),
|
NoteType(String),
|
||||||
Rated {
|
Rated {
|
||||||
days: u32,
|
days: u32,
|
||||||
ease: RatingKind,
|
ease: RatingKind,
|
||||||
},
|
},
|
||||||
Tag(Cow<'a, str>),
|
Tag(String),
|
||||||
Duplicates {
|
Duplicates {
|
||||||
note_type_id: NoteTypeID,
|
note_type_id: NoteTypeID,
|
||||||
text: Cow<'a, str>,
|
text: String,
|
||||||
},
|
},
|
||||||
State(StateKind),
|
State(StateKind),
|
||||||
Flag(u8),
|
Flag(u8),
|
||||||
NoteIDs(Cow<'a, str>),
|
NoteIDs(String),
|
||||||
CardIDs(&'a str),
|
CardIDs(String),
|
||||||
Property {
|
Property {
|
||||||
operator: String,
|
operator: String,
|
||||||
kind: PropertyKind,
|
kind: PropertyKind,
|
||||||
},
|
},
|
||||||
WholeCollection,
|
WholeCollection,
|
||||||
Regex(Cow<'a, str>),
|
Regex(String),
|
||||||
NoCombining(Cow<'a, str>),
|
NoCombining(String),
|
||||||
WordBoundary(Cow<'a, str>),
|
WordBoundary(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
@ -103,9 +102,9 @@ pub enum StateKind {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub enum TemplateKind<'a> {
|
pub enum TemplateKind {
|
||||||
Ordinal(u16),
|
Ordinal(u16),
|
||||||
Name(Cow<'a, str>),
|
Name(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
|
@ -303,7 +302,7 @@ fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
|
||||||
fn search_node_for_text_with_argument<'a>(
|
fn search_node_for_text_with_argument<'a>(
|
||||||
key: &'a str,
|
key: &'a str,
|
||||||
val: &'a str,
|
val: &'a str,
|
||||||
) -> ParseResult<'a, SearchNode<'a>> {
|
) -> ParseResult<'a, SearchNode> {
|
||||||
Ok(match key.to_ascii_lowercase().as_str() {
|
Ok(match key.to_ascii_lowercase().as_str() {
|
||||||
"deck" => SearchNode::Deck(unescape(val)?),
|
"deck" => SearchNode::Deck(unescape(val)?),
|
||||||
"note" => SearchNode::NoteType(unescape(val)?),
|
"note" => SearchNode::NoteType(unescape(val)?),
|
||||||
|
@ -319,7 +318,7 @@ fn search_node_for_text_with_argument<'a>(
|
||||||
"did" => parse_did(val)?,
|
"did" => parse_did(val)?,
|
||||||
"mid" => parse_mid(val)?,
|
"mid" => parse_mid(val)?,
|
||||||
"nid" => SearchNode::NoteIDs(check_id_list(val, key)?.into()),
|
"nid" => SearchNode::NoteIDs(check_id_list(val, key)?.into()),
|
||||||
"cid" => SearchNode::CardIDs(check_id_list(val, key)?),
|
"cid" => SearchNode::CardIDs(check_id_list(val, key)?.into()),
|
||||||
"re" => SearchNode::Regex(unescape_quotes(val)),
|
"re" => SearchNode::Regex(unescape_quotes(val)),
|
||||||
"nc" => SearchNode::NoCombining(unescape(val)?),
|
"nc" => SearchNode::NoCombining(unescape(val)?),
|
||||||
"w" => SearchNode::WordBoundary(unescape(val)?),
|
"w" => SearchNode::WordBoundary(unescape(val)?),
|
||||||
|
@ -579,7 +578,7 @@ fn parse_dupe(s: &str) -> ParseResult<SearchNode> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchNode<'a>> {
|
fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchNode> {
|
||||||
Ok(if let Some(stripped) = val.strip_prefix("re:") {
|
Ok(if let Some(stripped) = val.strip_prefix("re:") {
|
||||||
SearchNode::SingleField {
|
SearchNode::SingleField {
|
||||||
field: unescape(key)?,
|
field: unescape(key)?,
|
||||||
|
@ -596,25 +595,25 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
|
||||||
}
|
}
|
||||||
|
|
||||||
/// For strings without unescaped ", convert \" to "
|
/// For strings without unescaped ", convert \" to "
|
||||||
fn unescape_quotes(s: &str) -> Cow<str> {
|
fn unescape_quotes(s: &str) -> String {
|
||||||
if s.contains('"') {
|
if s.contains('"') {
|
||||||
s.replace(r#"\""#, "\"").into()
|
s.replace(r#"\""#, "\"")
|
||||||
} else {
|
} else {
|
||||||
s.into()
|
s.into()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// For non-globs like dupe text without any assumption about the content
|
/// For non-globs like dupe text without any assumption about the content
|
||||||
fn unescape_quotes_and_backslashes(s: &str) -> Cow<str> {
|
fn unescape_quotes_and_backslashes(s: &str) -> String {
|
||||||
if s.contains('"') || s.contains('\\') {
|
if s.contains('"') || s.contains('\\') {
|
||||||
s.replace(r#"\""#, "\"").replace(r"\\", r"\").into()
|
s.replace(r#"\""#, "\"").replace(r"\\", r"\")
|
||||||
} else {
|
} else {
|
||||||
s.into()
|
s.into()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Unescape chars with special meaning to the parser.
|
/// Unescape chars with special meaning to the parser.
|
||||||
fn unescape(txt: &str) -> ParseResult<Cow<str>> {
|
fn unescape(txt: &str) -> ParseResult<String> {
|
||||||
if let Some(seq) = invalid_escape_sequence(txt) {
|
if let Some(seq) = invalid_escape_sequence(txt) {
|
||||||
Err(parse_failure(txt, FailKind::UnknownEscape(seq)))
|
Err(parse_failure(txt, FailKind::UnknownEscape(seq)))
|
||||||
} else {
|
} else {
|
||||||
|
@ -631,6 +630,7 @@ fn unescape(txt: &str) -> ParseResult<Cow<str>> {
|
||||||
r"\-" => "-",
|
r"\-" => "-",
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
})
|
})
|
||||||
|
.into()
|
||||||
} else {
|
} else {
|
||||||
txt.into()
|
txt.into()
|
||||||
})
|
})
|
||||||
|
|
|
@ -134,7 +134,9 @@ impl SqlWriter<'_> {
|
||||||
SearchNode::EditedInDays(days) => self.write_edited(*days)?,
|
SearchNode::EditedInDays(days) => self.write_edited(*days)?,
|
||||||
SearchNode::CardTemplate(template) => match template {
|
SearchNode::CardTemplate(template) => match template {
|
||||||
TemplateKind::Ordinal(_) => self.write_template(template)?,
|
TemplateKind::Ordinal(_) => self.write_template(template)?,
|
||||||
TemplateKind::Name(name) => self.write_template(&TemplateKind::Name(norm(name)))?,
|
TemplateKind::Name(name) => {
|
||||||
|
self.write_template(&TemplateKind::Name(norm(name).into()))?
|
||||||
|
}
|
||||||
},
|
},
|
||||||
SearchNode::Deck(deck) => self.write_deck(&norm(deck))?,
|
SearchNode::Deck(deck) => self.write_deck(&norm(deck))?,
|
||||||
SearchNode::NoteTypeID(ntid) => {
|
SearchNode::NoteTypeID(ntid) => {
|
||||||
|
@ -532,7 +534,7 @@ impl RequiredTable {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Node<'_> {
|
impl Node {
|
||||||
fn required_table(&self) -> RequiredTable {
|
fn required_table(&self) -> RequiredTable {
|
||||||
match self {
|
match self {
|
||||||
Node::And => RequiredTable::CardsOrNotes,
|
Node::And => RequiredTable::CardsOrNotes,
|
||||||
|
@ -546,7 +548,7 @@ impl Node<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SearchNode<'_> {
|
impl SearchNode {
|
||||||
fn required_table(&self) -> RequiredTable {
|
fn required_table(&self) -> RequiredTable {
|
||||||
match self {
|
match self {
|
||||||
SearchNode::AddedInDays(_) => RequiredTable::Cards,
|
SearchNode::AddedInDays(_) => RequiredTable::Cards,
|
||||||
|
|
|
@ -67,8 +67,8 @@ pub fn replace_search_term(search: &str, replacement: &str) -> Result<String> {
|
||||||
let mut nodes = parse(search)?;
|
let mut nodes = parse(search)?;
|
||||||
let new = parse(replacement)?;
|
let new = parse(replacement)?;
|
||||||
if let [Node::Search(search_node)] = &new[..] {
|
if let [Node::Search(search_node)] = &new[..] {
|
||||||
fn update_node_vec<'a>(old_nodes: &mut [Node<'a>], new_node: &SearchNode<'a>) {
|
fn update_node_vec(old_nodes: &mut [Node], new_node: &SearchNode) {
|
||||||
fn update_node<'a>(old_node: &mut Node<'a>, new_node: &SearchNode<'a>) {
|
fn update_node(old_node: &mut Node, new_node: &SearchNode) {
|
||||||
match old_node {
|
match old_node {
|
||||||
Node::Not(n) => update_node(n, new_node),
|
Node::Not(n) => update_node(n, new_node),
|
||||||
Node::Group(ns) => update_node_vec(ns, new_node),
|
Node::Group(ns) => update_node_vec(ns, new_node),
|
||||||
|
@ -89,7 +89,7 @@ pub fn replace_search_term(search: &str, replacement: &str) -> Result<String> {
|
||||||
|
|
||||||
pub fn write_nodes<'a, I>(nodes: I) -> String
|
pub fn write_nodes<'a, I>(nodes: I) -> String
|
||||||
where
|
where
|
||||||
I: IntoIterator<Item = &'a Node<'a>>,
|
I: IntoIterator<Item = &'a Node>,
|
||||||
{
|
{
|
||||||
nodes.into_iter().map(|node| write_node(node)).collect()
|
nodes.into_iter().map(|node| write_node(node)).collect()
|
||||||
}
|
}
|
||||||
|
|
|
@ -336,11 +336,11 @@ pub(crate) fn to_text(txt: &str) -> Cow<str> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Escape Anki wildcards and the backslash for escaping them: \*_
|
/// Escape Anki wildcards and the backslash for escaping them: \*_
|
||||||
pub(crate) fn escape_anki_wildcards(txt: &str) -> Cow<str> {
|
pub(crate) fn escape_anki_wildcards(txt: &str) -> String {
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref RE: Regex = Regex::new(r"[\\*_]").unwrap();
|
static ref RE: Regex = Regex::new(r"[\\*_]").unwrap();
|
||||||
}
|
}
|
||||||
RE.replace_all(&txt, r"\$0")
|
RE.replace_all(&txt, r"\$0").into()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Compare text with a possible glob, folding case.
|
/// Compare text with a possible glob, folding case.
|
||||||
|
|
Loading…
Reference in a new issue