mirror of https://github.com/ankitects/anki.git
synced 2025-09-25 01:06:35 -04:00
handle empty searches and leading/trailing whitespace
This commit is contained in:
parent 67cb27bada
commit 224bad2566
2 changed files with 29 additions and 2 deletions
@@ -76,6 +76,7 @@ pub(super) enum SearchNode<'a> {
         operator: String,
         kind: PropertyKind,
     },
+    WholeCollection,
 }
 
 #[derive(Debug, PartialEq)]

@@ -106,6 +107,11 @@ pub(super) enum TemplateKind {
 /// Parse the input string into a list of nodes.
 #[allow(dead_code)]
 pub(super) fn parse(input: &str) -> Result<Vec<Node>> {
+    let input = input.trim();
+    if input.is_empty() {
+        return Ok(vec![Node::Search(SearchNode::WholeCollection)]);
+    }
+
     let (_, nodes) = all_consuming(group_inner)(input)
         .map_err(|_e| AnkiError::invalid_input("unable to parse search"))?;
     Ok(nodes)
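
The hunk above is the core of the change: parse() now trims the raw query and short-circuits when nothing is left, returning a single WholeCollection node instead of feeding an empty string to the nom parser. A standalone sketch of that guard, with a deliberately simplified Node type (not the crate's real AST or Result type), looks roughly like this:

    // A minimal, standalone sketch of the guard added to parse(): trim the
    // query and treat an empty or whitespace-only string as "whole collection".
    // The Node type here is deliberately simplified; it is not the crate's AST.
    #[derive(Debug, PartialEq)]
    enum Node {
        Search(String),
        WholeCollection,
    }

    fn parse(input: &str) -> Vec<Node> {
        let input = input.trim();
        if input.is_empty() {
            // An empty search should match everything rather than fail to parse.
            return vec![Node::WholeCollection];
        }
        input
            .split_whitespace()
            .map(|term| Node::Search(term.to_string()))
            .collect()
    }

    fn main() {
        assert_eq!(parse("   "), vec![Node::WholeCollection]);
        assert_eq!(
            parse(" a  b "),
            vec![Node::Search("a".into()), Node::Search("b".into())]
        );
    }
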
@@ -150,12 +156,19 @@ fn group_inner(input: &str) -> IResult<&str, Vec<Node>> {
         };
     }
 
-    Ok((remaining, nodes))
+    if nodes.is_empty() {
+        Err(nom::Err::Error((remaining, nom::error::ErrorKind::Many1)))
+    } else {
+        Ok((remaining, nodes))
+    }
 }
 
+fn whitespace0(s: &str) -> IResult<&str, Vec<char>> {
+    many0(one_of(" \u{3000}"))(s)
+}
+
 /// Optional leading space, then a (negated) group or text
 fn node(s: &str) -> IResult<&str, Node> {
-    let whitespace0 = many0(one_of(" \u{3000}"));
     preceded(whitespace0, alt((negated_node, group, text)))(s)
 }
 
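
Two things happen in the group_inner hunk: an empty parse result is now reported as a nom error (ErrorKind::Many1) rather than silently returning no nodes, and the inline whitespace parser is lifted into a named whitespace0 function so node no longer rebuilds it on every call. The empty-result check, sketched without nom and with hypothetical names:

    // Sketch of the "no nodes parsed" check in plain Rust; the real code wraps
    // this in nom's error type (ErrorKind::Many1). Names are illustrative.
    fn require_nonempty<T>(remaining: &str, nodes: Vec<T>) -> Result<(&str, Vec<T>), String> {
        if nodes.is_empty() {
            Err(format!("expected at least one node before {:?}", remaining))
        } else {
            Ok((remaining, nodes))
        }
    }

    fn main() {
        assert!(require_nonempty::<u8>("", vec![]).is_err());
        assert_eq!(require_nonempty("rest", vec![1, 2]).unwrap().1, vec![1, 2]);
    }
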
@@ -374,6 +387,19 @@ mod test {
         use Node::*;
         use SearchNode::*;
 
+        assert_eq!(parse("")?, vec![Search(SearchNode::WholeCollection)]);
+        assert_eq!(parse(" ")?, vec![Search(SearchNode::WholeCollection)]);
+
+        // leading/trailing/interspersed whitespace
+        assert_eq!(
+            parse(" t t2 ")?,
+            vec![
+                Search(UnqualifiedText("t".into())),
+                And,
+                Search(UnqualifiedText("t2".into()))
+            ]
+        );
+
         assert_eq!(
             parse(r#"hello -(world and "foo:bar baz") OR test"#)?,
             vec![

@@ -79,6 +79,7 @@ impl SqlWriter<'_, '_> {
                 write!(self.sql, "c.id in ({})", cids).unwrap();
             }
             SearchNode::Property { operator, kind } => self.write_prop(operator, kind)?,
+            SearchNode::WholeCollection => write!(self.sql, "true").unwrap(),
         };
         Ok(())
     }
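
This last hunk is in the second changed file, the SQL writer: the new SearchNode::WholeCollection arm emits the literal condition true, so an empty search produces a WHERE clause that matches every card. A toy illustration of that effect (not Anki's real SqlWriter or schema):

    use std::fmt::Write;

    // Toy illustration of the new arm's effect: a "match everything" node
    // becomes the SQL condition `true`. This is not Anki's real SqlWriter.
    enum Cond {
        UnqualifiedText(String),
        WholeCollection,
    }

    fn write_cond(sql: &mut String, cond: &Cond) {
        match cond {
            // Hypothetical text condition, just to give the match a second arm.
            Cond::UnqualifiedText(t) => write!(sql, "n.text like '%{}%'", t).unwrap(),
            // Mirrors `SearchNode::WholeCollection => write!(self.sql, "true")`.
            Cond::WholeCollection => write!(sql, "true").unwrap(),
        }
    }

    fn main() {
        let mut sql = String::from("select c.id from cards c where ");
        write_cond(&mut sql, &Cond::WholeCollection);
        assert_eq!(sql, "select c.id from cards c where true");

        let mut sql2 = String::from("select c.id from cards c where ");
        write_cond(&mut sql2, &Cond::UnqualifiedText("dog".into()));
        assert_eq!(sql2, "select c.id from cards c where n.text like '%dog%'");
    }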