From bca5f2ddffb022b34d92680837e29c8a2007fc67 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Fri, 20 Mar 2020 08:20:47 +1000 Subject: [PATCH] prototype of integration no ordering yet, and no tests --- proto/backend.proto | 11 ++ pylib/anki/rsbackend.py | 5 + rslib/src/backend/mod.rs | 11 ++ rslib/src/search/cards.rs | 28 +++ rslib/src/search/mod.rs | 5 +- rslib/src/search/parser.rs | 10 +- rslib/src/search/searcher.rs | 361 ---------------------------------- rslib/src/search/sqlwriter.rs | 361 ++++++++++++++++++++++++++++++++++ 8 files changed, 426 insertions(+), 366 deletions(-) create mode 100644 rslib/src/search/cards.rs delete mode 100644 rslib/src/search/searcher.rs create mode 100644 rslib/src/search/sqlwriter.rs diff --git a/proto/backend.proto b/proto/backend.proto index 18b6c174c..638d9bc57 100644 --- a/proto/backend.proto +++ b/proto/backend.proto @@ -43,6 +43,7 @@ message BackendInput { Empty restore_trash = 35; OpenCollectionIn open_collection = 36; Empty close_collection = 37; + SearchCardsIn search_cards = 38; } } @@ -73,6 +74,7 @@ message BackendOutput { Empty restore_trash = 35; Empty open_collection = 36; Empty close_collection = 37; + SearchCardsOut search_cards = 38; BackendError error = 2047; } @@ -332,3 +334,12 @@ message OpenCollectionIn { string media_db_path = 3; string log_path = 4; } + +message SearchCardsIn { + string search = 1; +} + +message SearchCardsOut { + repeated int64 card_ids = 1; + +} diff --git a/pylib/anki/rsbackend.py b/pylib/anki/rsbackend.py index a77e0f367..eb3f69d63 100644 --- a/pylib/anki/rsbackend.py +++ b/pylib/anki/rsbackend.py @@ -423,6 +423,11 @@ class RustBackend: def _db_command(self, input: Dict[str, Any]) -> Any: return orjson.loads(self._backend.db_command(orjson.dumps(input))) + def search_cards(self, search: str) -> Sequence[int]: + return self._run_command( + pb.BackendInput(search_cards=pb.SearchCardsIn(search=search)) + ).search_cards.card_ids + def translate_string_in( key: TR, **kwargs: Union[str, int, float] diff --git a/rslib/src/backend/mod.rs b/rslib/src/backend/mod.rs index 84d40129e..8640be245 100644 --- a/rslib/src/backend/mod.rs +++ b/rslib/src/backend/mod.rs @@ -14,6 +14,7 @@ use crate::media::sync::MediaSyncProgress; use crate::media::MediaManager; use crate::sched::cutoff::{local_minutes_west_for_stamp, sched_timing_today_v2_new}; use crate::sched::timespan::{answer_button_time, learning_congrats, studied_today, time_span}; +use crate::search::search_cards; use crate::template::{ render_card, without_legacy_template_directives, FieldMap, FieldRequirements, ParsedTemplate, RenderedNode, @@ -246,6 +247,7 @@ impl Backend { self.close_collection()?; OValue::CloseCollection(Empty {}) } + Value::SearchCards(input) => OValue::SearchCards(self.search_cards(input)?), }) } @@ -577,6 +579,15 @@ impl Backend { pub fn db_command(&self, input: &[u8]) -> Result { self.with_col(|col| col.with_ctx(|ctx| db_command_bytes(&ctx.storage, input))) } + + fn search_cards(&self, input: pb::SearchCardsIn) -> Result { + self.with_col(|col| { + col.with_ctx(|ctx| { + let cids = search_cards(ctx, &input.search)?; + Ok(pb::SearchCardsOut { card_ids: cids }) + }) + }) + } } fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue { diff --git a/rslib/src/search/cards.rs b/rslib/src/search/cards.rs new file mode 100644 index 000000000..00a3f21e9 --- /dev/null +++ b/rslib/src/search/cards.rs @@ -0,0 +1,28 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; 
http://www.gnu.org/licenses/agpl.html + +use super::{parser::Node, sqlwriter::node_to_sql}; +use crate::collection::RequestContext; +use crate::err::Result; +use crate::search::parser::parse; +use crate::types::ObjID; + +pub(crate) fn search_cards<'a, 'b>( + req: &'a mut RequestContext<'b>, + search: &'a str, +) -> Result> { + let top_node = Node::Group(parse(search)?); + let (sql, args) = node_to_sql(req, &top_node)?; + + let sql = format!( + "select c.id from cards c, notes n where c.nid=n.id and {} order by c.id", + sql + ); + let mut stmt = req.storage.db.prepare(&sql)?; + let ids: Vec = stmt + .query_map(&args, |row| row.get(0))? + .collect::>()?; + + println!("sql {}\nargs {:?} count {}", sql, args, ids.len()); + Ok(ids) +} diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs index 2732ec547..417241051 100644 --- a/rslib/src/search/mod.rs +++ b/rslib/src/search/mod.rs @@ -1,2 +1,5 @@ +mod cards; mod parser; -mod searcher; +mod sqlwriter; + +pub(crate) use cards::search_cards; diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index 62824fe9f..d60112a2d 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -1,6 +1,7 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use crate::err::{AnkiError, Result}; use crate::types::ObjID; use nom::branch::alt; use nom::bytes::complete::{escaped, is_not, tag, take_while1}; @@ -33,7 +34,7 @@ impl From> for ParseError { } } -type ParseResult = Result; +type ParseResult = std::result::Result; #[derive(Debug, PartialEq)] pub(super) enum Node<'a> { @@ -104,8 +105,9 @@ pub(super) enum TemplateKind { /// Parse the input string into a list of nodes. #[allow(dead_code)] -pub(super) fn parse(input: &str) -> std::result::Result, String> { - let (_, nodes) = all_consuming(group_inner)(input).map_err(|e| format!("{:?}", e))?; +pub(super) fn parse(input: &str) -> Result> { + let (_, nodes) = all_consuming(group_inner)(input) + .map_err(|_e| AnkiError::invalid_input("unable to parse search"))?; Ok(nodes) } @@ -368,7 +370,7 @@ mod test { use super::*; #[test] - fn parsing() -> Result<(), String> { + fn parsing() -> Result<()> { use Node::*; use SearchNode::*; diff --git a/rslib/src/search/searcher.rs b/rslib/src/search/searcher.rs deleted file mode 100644 index 35229a21d..000000000 --- a/rslib/src/search/searcher.rs +++ /dev/null @@ -1,361 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use super::parser::{Node, PropertyKind, SearchNode, StateKind, TemplateKind}; -use crate::card::CardQueue; -use crate::decks::child_ids; -use crate::decks::get_deck; -use crate::err::{AnkiError, Result}; -use crate::notes::field_checksum; -use crate::text::matches_wildcard; -use crate::{ - collection::RequestContext, text::strip_html_preserving_image_filenames, types::ObjID, -}; -use rusqlite::types::ToSqlOutput; -use std::fmt::Write; - -struct SearchContext<'a> { - #[allow(dead_code)] - req: &'a mut RequestContext<'a>, - sql: String, - args: Vec>, -} - -#[allow(dead_code)] -fn node_to_sql<'a>( - req: &'a mut RequestContext<'a>, - node: &'a Node, -) -> Result<(String, Vec>)> { - let sql = String::new(); - let args = vec![]; - let mut sctx = SearchContext { req, sql, args }; - write_node_to_sql(&mut sctx, node)?; - Ok((sctx.sql, sctx.args)) -} - -fn write_node_to_sql(ctx: &mut SearchContext, node: &Node) -> Result<()> { - match node { - Node::And => 
write!(ctx.sql, " and ").unwrap(), - Node::Or => write!(ctx.sql, " or ").unwrap(), - Node::Not(node) => { - write!(ctx.sql, "not ").unwrap(); - write_node_to_sql(ctx, node)?; - } - Node::Group(nodes) => { - write!(ctx.sql, "(").unwrap(); - for node in nodes { - write_node_to_sql(ctx, node)?; - } - write!(ctx.sql, ")").unwrap(); - } - Node::Search(search) => write_search_node_to_sql(ctx, search)?, - }; - Ok(()) -} - -fn write_search_node_to_sql(ctx: &mut SearchContext, node: &SearchNode) -> Result<()> { - match node { - SearchNode::UnqualifiedText(text) => write_unqualified(ctx, text), - SearchNode::SingleField { field, text } => { - write_single_field(ctx, field.as_ref(), text.as_ref())? - } - SearchNode::AddedInDays(days) => { - write!(ctx.sql, "c.id > {}", days).unwrap(); - } - SearchNode::CardTemplate(template) => write_template(ctx, template)?, - SearchNode::Deck(deck) => write_deck(ctx, deck.as_ref())?, - SearchNode::NoteTypeID(ntid) => { - write!(ctx.sql, "n.mid = {}", ntid).unwrap(); - } - SearchNode::NoteType(notetype) => write_note_type(ctx, notetype.as_ref())?, - SearchNode::Rated { days, ease } => write_rated(ctx, *days, *ease)?, - SearchNode::Tag(tag) => write_tag(ctx, tag), - SearchNode::Duplicates { note_type_id, text } => write_dupes(ctx, *note_type_id, text), - SearchNode::State(state) => write_state(ctx, state)?, - SearchNode::Flag(flag) => { - write!(ctx.sql, "(c.flags & 7) == {}", flag).unwrap(); - } - SearchNode::NoteIDs(nids) => { - write!(ctx.sql, "n.id in ({})", nids).unwrap(); - } - SearchNode::CardIDs(cids) => { - write!(ctx.sql, "c.id in ({})", cids).unwrap(); - } - SearchNode::Property { operator, kind } => write_prop(ctx, operator, kind)?, - }; - Ok(()) -} - -fn write_unqualified(ctx: &mut SearchContext, text: &str) { - // implicitly wrap in % - let text = format!("%{}%", text); - write!( - ctx.sql, - "(n.sfld like ? escape '\\' or n.flds like ? 
escape '\\')" - ) - .unwrap(); - ctx.args.push(text.clone().into()); - ctx.args.push(text.into()); -} - -fn write_tag(ctx: &mut SearchContext, text: &str) { - if text == "none" { - write!(ctx.sql, "n.tags = ''").unwrap(); - return; - } - - let tag = format!(" %{}% ", text.replace('*', "%")); - write!(ctx.sql, "n.tags like ?").unwrap(); - ctx.args.push(tag.into()); -} - -fn write_rated(ctx: &mut SearchContext, days: u32, ease: Option) -> Result<()> { - let today_cutoff = ctx.req.storage.timing_today()?.next_day_at; - let days = days.min(31) as i64; - let target_cutoff = today_cutoff - 86_400 * days; - write!( - ctx.sql, - "c.id in (select cid from revlog where id>{}", - target_cutoff - ) - .unwrap(); - if let Some(ease) = ease { - write!(ctx.sql, "and ease={})", ease).unwrap(); - } else { - write!(ctx.sql, ")").unwrap(); - } - - Ok(()) -} - -fn write_prop(ctx: &mut SearchContext, op: &str, kind: &PropertyKind) -> Result<()> { - let timing = ctx.req.storage.timing_today()?; - match kind { - PropertyKind::Due(days) => { - let day = days + (timing.days_elapsed as i32); - write!( - ctx.sql, - "(c.queue in ({rev},{daylrn}) and due {op} {day})", - rev = CardQueue::Review as u8, - daylrn = CardQueue::DayLearn as u8, - op = op, - day = day - ) - } - PropertyKind::Interval(ivl) => write!(ctx.sql, "ivl {} {}", op, ivl), - PropertyKind::Reps(reps) => write!(ctx.sql, "reps {} {}", op, reps), - PropertyKind::Lapses(days) => write!(ctx.sql, "lapses {} {}", op, days), - PropertyKind::Ease(ease) => write!(ctx.sql, "ease {} {}", op, (ease * 1000.0) as u32), - } - .unwrap(); - Ok(()) -} - -fn write_state(ctx: &mut SearchContext, state: &StateKind) -> Result<()> { - let timing = ctx.req.storage.timing_today()?; - match state { - StateKind::New => write!(ctx.sql, "c.queue = {}", CardQueue::New as u8), - StateKind::Review => write!(ctx.sql, "c.queue = {}", CardQueue::Review as u8), - StateKind::Learning => write!( - ctx.sql, - "c.queue in ({},{})", - CardQueue::Learn as u8, - CardQueue::DayLearn as u8 - ), - StateKind::Buried => write!( - ctx.sql, - "c.queue in ({},{})", - CardQueue::SchedBuried as u8, - CardQueue::UserBuried as u8 - ), - StateKind::Suspended => write!(ctx.sql, "c.queue = {}", CardQueue::Suspended as u8), - StateKind::Due => write!( - ctx.sql, - " -(c.queue in ({rev},{daylrn}) and c.due <= {today}) or -(c.queue = {lrn} and c.due <= {daycutoff})", - rev = CardQueue::Review as u8, - daylrn = CardQueue::DayLearn as u8, - today = timing.days_elapsed, - lrn = CardQueue::Learn as u8, - daycutoff = timing.next_day_at, - ), - } - .unwrap(); - Ok(()) -} - -fn write_deck(ctx: &mut SearchContext, deck: &str) -> Result<()> { - match deck { - "*" => write!(ctx.sql, "true").unwrap(), - "filtered" => write!(ctx.sql, "c.odid > 0").unwrap(), - deck => { - let all_decks = ctx.req.storage.all_decks()?; - let dids_with_children = if deck == "current" { - let config = ctx.req.storage.all_config()?; - let mut dids_with_children = vec![config.current_deck_id]; - let current = get_deck(&all_decks, config.current_deck_id) - .ok_or_else(|| AnkiError::invalid_input("invalid current deck"))?; - for child_did in child_ids(&all_decks, ¤t.name) { - dids_with_children.push(child_did); - } - dids_with_children - } else { - let mut dids_with_children = vec![]; - for deck in all_decks.iter().filter(|d| matches_wildcard(&d.name, deck)) { - dids_with_children.push(deck.id); - for child_id in child_ids(&all_decks, &deck.name) { - dids_with_children.push(child_id); - } - } - dids_with_children - }; - - ctx.sql.push_str("c.did in "); 
- ids_to_string(&mut ctx.sql, &dids_with_children); - } - }; - Ok(()) -} - -fn write_template(ctx: &mut SearchContext, template: &TemplateKind) -> Result<()> { - match template { - TemplateKind::Ordinal(n) => { - write!(ctx.sql, "c.ord = {}", n).unwrap(); - } - TemplateKind::Name(name) => { - let note_types = ctx.req.storage.all_note_types()?; - let mut id_ords = vec![]; - for nt in note_types.values() { - for tmpl in &nt.templates { - if matches_wildcard(&tmpl.name, name) { - id_ords.push(format!("(n.mid = {} and c.ord = {})", nt.id, tmpl.ord)); - } - } - } - - if id_ords.is_empty() { - ctx.sql.push_str("false"); - } else { - write!(ctx.sql, "({})", id_ords.join(",")).unwrap(); - } - } - }; - Ok(()) -} - -fn write_note_type(ctx: &mut SearchContext, nt_name: &str) -> Result<()> { - let ntids: Vec<_> = ctx - .req - .storage - .all_note_types()? - .values() - .filter(|nt| matches_wildcard(&nt.name, nt_name)) - .map(|nt| nt.id) - .collect(); - ctx.sql.push_str("n.mid in "); - ids_to_string(&mut ctx.sql, &ntids); - Ok(()) -} - -fn write_single_field(ctx: &mut SearchContext, field_name: &str, val: &str) -> Result<()> { - let note_types = ctx.req.storage.all_note_types()?; - - let mut field_map = vec![]; - for nt in note_types.values() { - for field in &nt.fields { - if field.name.eq_ignore_ascii_case(field_name) { - field_map.push((nt.id, field.ord)); - } - } - } - - if field_map.is_empty() { - write!(ctx.sql, "false").unwrap(); - return Ok(()); - } - - write!(ctx.sql, "(").unwrap(); - ctx.args.push(val.to_string().into()); - let arg_idx = ctx.args.len(); - for (ntid, ord) in field_map { - write!( - ctx.sql, - "(n.mid = {} and field_at_index(n.flds, {}) like ?{})", - ntid, ord, arg_idx - ) - .unwrap(); - } - write!(ctx.sql, ")").unwrap(); - - Ok(()) -} - -fn write_dupes(ctx: &mut SearchContext, ntid: ObjID, text: &str) { - let text_nohtml = strip_html_preserving_image_filenames(text); - let csum = field_checksum(text_nohtml.as_ref()); - write!( - ctx.sql, - "(n.mid = {} and n.csum = {} and field_at_index(n.flds, 0) = ?", - ntid, csum - ) - .unwrap(); - ctx.args.push(text.to_string().into()) -} - -// Write a list of IDs as '(x,y,...)' into the provided string. 
-fn ids_to_string(buf: &mut String, ids: &[T]) -where - T: std::fmt::Display, -{ - buf.push('('); - if !ids.is_empty() { - for id in ids.iter().skip(1) { - write!(buf, "{},", id).unwrap(); - } - write!(buf, "{}", ids[0]).unwrap(); - } - buf.push(')'); -} - -#[cfg(test)] -mod test { - use super::ids_to_string; - - #[test] - fn ids_string() { - let mut s = String::new(); - ids_to_string::(&mut s, &[]); - assert_eq!(s, "()"); - s.clear(); - ids_to_string(&mut s, &[7]); - assert_eq!(s, "(7)"); - s.clear(); - ids_to_string(&mut s, &[7, 6]); - assert_eq!(s, "(6,7)"); - s.clear(); - ids_to_string(&mut s, &[7, 6, 5]); - assert_eq!(s, "(6,5,7)"); - s.clear(); - } - - // use super::super::parser::parse; - // use super::*; - - // parse - // fn p(search: &str) -> Node { - // Node::Group(parse(search).unwrap()) - // } - - // get sql - // fn s<'a>(n: &'a Node) -> (String, Vec>) { - // node_to_sql(n) - // } - - #[test] - fn tosql() -> Result<(), String> { - // assert_eq!(s(&p("added:1")), ("(c.id > 1)".into(), vec![])); - - Ok(()) - } -} diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs new file mode 100644 index 000000000..c7a0ea671 --- /dev/null +++ b/rslib/src/search/sqlwriter.rs @@ -0,0 +1,361 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use super::parser::{Node, PropertyKind, SearchNode, StateKind, TemplateKind}; +use crate::card::CardQueue; +use crate::decks::child_ids; +use crate::decks::get_deck; +use crate::err::{AnkiError, Result}; +use crate::notes::field_checksum; +use crate::text::matches_wildcard; +use crate::{ + collection::RequestContext, text::strip_html_preserving_image_filenames, types::ObjID, +}; +use std::fmt::Write; + +struct SqlWriter<'a, 'b> { + req: &'a mut RequestContext<'b>, + sql: String, + args: Vec, +} + +pub(super) fn node_to_sql(req: &mut RequestContext, node: &Node) -> Result<(String, Vec)> { + let mut sctx = SqlWriter::new(req); + sctx.write_node_to_sql(&node)?; + Ok((sctx.sql, sctx.args)) +} + +impl SqlWriter<'_, '_> { + fn new<'a, 'b>(req: &'a mut RequestContext<'b>) -> SqlWriter<'a, 'b> { + let sql = String::new(); + let args = vec![]; + SqlWriter { req, sql, args } + } + + fn write_node_to_sql(&mut self, node: &Node) -> Result<()> { + match node { + Node::And => write!(self.sql, " and ").unwrap(), + Node::Or => write!(self.sql, " or ").unwrap(), + Node::Not(node) => { + write!(self.sql, "not ").unwrap(); + self.write_node_to_sql(node)?; + } + Node::Group(nodes) => { + write!(self.sql, "(").unwrap(); + for node in nodes { + self.write_node_to_sql(node)?; + } + write!(self.sql, ")").unwrap(); + } + Node::Search(search) => self.write_search_node_to_sql(search)?, + }; + Ok(()) + } + + fn write_search_node_to_sql(&mut self, node: &SearchNode) -> Result<()> { + match node { + SearchNode::UnqualifiedText(text) => self.write_unqualified(text), + SearchNode::SingleField { field, text } => { + self.write_single_field(field.as_ref(), text.as_ref())? 
+ } + SearchNode::AddedInDays(days) => { + write!(self.sql, "c.id > {}", days).unwrap(); + } + SearchNode::CardTemplate(template) => self.write_template(template)?, + SearchNode::Deck(deck) => self.write_deck(deck.as_ref())?, + SearchNode::NoteTypeID(ntid) => { + write!(self.sql, "n.mid = {}", ntid).unwrap(); + } + SearchNode::NoteType(notetype) => self.write_note_type(notetype.as_ref())?, + SearchNode::Rated { days, ease } => self.write_rated(*days, *ease)?, + SearchNode::Tag(tag) => self.write_tag(tag), + SearchNode::Duplicates { note_type_id, text } => self.write_dupes(*note_type_id, text), + SearchNode::State(state) => self.write_state(state)?, + SearchNode::Flag(flag) => { + write!(self.sql, "(c.flags & 7) == {}", flag).unwrap(); + } + SearchNode::NoteIDs(nids) => { + write!(self.sql, "n.id in ({})", nids).unwrap(); + } + SearchNode::CardIDs(cids) => { + write!(self.sql, "c.id in ({})", cids).unwrap(); + } + SearchNode::Property { operator, kind } => self.write_prop(operator, kind)?, + }; + Ok(()) + } + + fn write_unqualified(&mut self, text: &str) { + // implicitly wrap in % + let text = format!("%{}%", text); + write!( + self.sql, + "(n.sfld like ? escape '\\' or n.flds like ? escape '\\')" + ) + .unwrap(); + self.args.push(text.clone().into()); + self.args.push(text.into()); + } + + fn write_tag(&mut self, text: &str) { + if text == "none" { + write!(self.sql, "n.tags = ''").unwrap(); + return; + } + + let tag = format!(" %{}% ", text.replace('*', "%")); + write!(self.sql, "n.tags like ?").unwrap(); + self.args.push(tag.into()); + } + + fn write_rated(&mut self, days: u32, ease: Option) -> Result<()> { + let today_cutoff = self.req.storage.timing_today()?.next_day_at; + let days = days.min(31) as i64; + let target_cutoff = today_cutoff - 86_400 * days; + write!( + self.sql, + "c.id in (select cid from revlog where id>{}", + target_cutoff + ) + .unwrap(); + if let Some(ease) = ease { + write!(self.sql, "and ease={})", ease).unwrap(); + } else { + write!(self.sql, ")").unwrap(); + } + + Ok(()) + } + + fn write_prop(&mut self, op: &str, kind: &PropertyKind) -> Result<()> { + let timing = self.req.storage.timing_today()?; + match kind { + PropertyKind::Due(days) => { + let day = days + (timing.days_elapsed as i32); + write!( + self.sql, + "(c.queue in ({rev},{daylrn}) and due {op} {day})", + rev = CardQueue::Review as u8, + daylrn = CardQueue::DayLearn as u8, + op = op, + day = day + ) + } + PropertyKind::Interval(ivl) => write!(self.sql, "ivl {} {}", op, ivl), + PropertyKind::Reps(reps) => write!(self.sql, "reps {} {}", op, reps), + PropertyKind::Lapses(days) => write!(self.sql, "lapses {} {}", op, days), + PropertyKind::Ease(ease) => write!(self.sql, "ease {} {}", op, (ease * 1000.0) as u32), + } + .unwrap(); + Ok(()) + } + + fn write_state(&mut self, state: &StateKind) -> Result<()> { + let timing = self.req.storage.timing_today()?; + match state { + StateKind::New => write!(self.sql, "c.queue = {}", CardQueue::New as u8), + StateKind::Review => write!(self.sql, "c.queue = {}", CardQueue::Review as u8), + StateKind::Learning => write!( + self.sql, + "c.queue in ({},{})", + CardQueue::Learn as u8, + CardQueue::DayLearn as u8 + ), + StateKind::Buried => write!( + self.sql, + "c.queue in ({},{})", + CardQueue::SchedBuried as u8, + CardQueue::UserBuried as u8 + ), + StateKind::Suspended => write!(self.sql, "c.queue = {}", CardQueue::Suspended as u8), + StateKind::Due => write!( + self.sql, + " + (c.queue in ({rev},{daylrn}) and c.due <= {today}) or + (c.queue = {lrn} and c.due <= 
{daycutoff})", + rev = CardQueue::Review as u8, + daylrn = CardQueue::DayLearn as u8, + today = timing.days_elapsed, + lrn = CardQueue::Learn as u8, + daycutoff = timing.next_day_at, + ), + } + .unwrap(); + Ok(()) + } + + fn write_deck(&mut self, deck: &str) -> Result<()> { + match deck { + "*" => write!(self.sql, "true").unwrap(), + "filtered" => write!(self.sql, "c.odid > 0").unwrap(), + deck => { + let all_decks = self.req.storage.all_decks()?; + let dids_with_children = if deck == "current" { + let config = self.req.storage.all_config()?; + let mut dids_with_children = vec![config.current_deck_id]; + let current = get_deck(&all_decks, config.current_deck_id) + .ok_or_else(|| AnkiError::invalid_input("invalid current deck"))?; + for child_did in child_ids(&all_decks, ¤t.name) { + dids_with_children.push(child_did); + } + dids_with_children + } else { + let mut dids_with_children = vec![]; + for deck in all_decks.iter().filter(|d| matches_wildcard(&d.name, deck)) { + dids_with_children.push(deck.id); + for child_id in child_ids(&all_decks, &deck.name) { + dids_with_children.push(child_id); + } + } + dids_with_children + }; + + self.sql.push_str("c.did in "); + ids_to_string(&mut self.sql, &dids_with_children); + } + }; + Ok(()) + } + + fn write_template(&mut self, template: &TemplateKind) -> Result<()> { + match template { + TemplateKind::Ordinal(n) => { + write!(self.sql, "c.ord = {}", n).unwrap(); + } + TemplateKind::Name(name) => { + let note_types = self.req.storage.all_note_types()?; + let mut id_ords = vec![]; + for nt in note_types.values() { + for tmpl in &nt.templates { + if matches_wildcard(&tmpl.name, name) { + id_ords.push(format!("(n.mid = {} and c.ord = {})", nt.id, tmpl.ord)); + } + } + } + + if id_ords.is_empty() { + self.sql.push_str("false"); + } else { + write!(self.sql, "({})", id_ords.join(",")).unwrap(); + } + } + }; + Ok(()) + } + + fn write_note_type(&mut self, nt_name: &str) -> Result<()> { + let ntids: Vec<_> = self + .req + .storage + .all_note_types()? + .values() + .filter(|nt| matches_wildcard(&nt.name, nt_name)) + .map(|nt| nt.id) + .collect(); + self.sql.push_str("n.mid in "); + ids_to_string(&mut self.sql, &ntids); + Ok(()) + } + + fn write_single_field(&mut self, field_name: &str, val: &str) -> Result<()> { + let note_types = self.req.storage.all_note_types()?; + + let mut field_map = vec![]; + for nt in note_types.values() { + for field in &nt.fields { + if field.name.eq_ignore_ascii_case(field_name) { + field_map.push((nt.id, field.ord)); + } + } + } + + if field_map.is_empty() { + write!(self.sql, "false").unwrap(); + return Ok(()); + } + + write!(self.sql, "(").unwrap(); + self.args.push(val.to_string().into()); + let arg_idx = self.args.len(); + for (ntid, ord) in field_map { + write!( + self.sql, + "(n.mid = {} and field_at_index(n.flds, {}) like ?{})", + ntid, ord, arg_idx + ) + .unwrap(); + } + write!(self.sql, ")").unwrap(); + + Ok(()) + } + + fn write_dupes(&mut self, ntid: ObjID, text: &str) { + let text_nohtml = strip_html_preserving_image_filenames(text); + let csum = field_checksum(text_nohtml.as_ref()); + write!( + self.sql, + "(n.mid = {} and n.csum = {} and field_at_index(n.flds, 0) = ?", + ntid, csum + ) + .unwrap(); + self.args.push(text.to_string().into()) + } +} + +// Write a list of IDs as '(x,y,...)' into the provided string. 
+fn ids_to_string(buf: &mut String, ids: &[T]) +where + T: std::fmt::Display, +{ + buf.push('('); + if !ids.is_empty() { + for id in ids.iter().skip(1) { + write!(buf, "{},", id).unwrap(); + } + write!(buf, "{}", ids[0]).unwrap(); + } + buf.push(')'); +} + +#[cfg(test)] +mod test { + use super::ids_to_string; + + #[test] + fn ids_string() { + let mut s = String::new(); + ids_to_string::(&mut s, &[]); + assert_eq!(s, "()"); + s.clear(); + ids_to_string(&mut s, &[7]); + assert_eq!(s, "(7)"); + s.clear(); + ids_to_string(&mut s, &[7, 6]); + assert_eq!(s, "(6,7)"); + s.clear(); + ids_to_string(&mut s, &[7, 6, 5]); + assert_eq!(s, "(6,5,7)"); + s.clear(); + } + + // use super::super::parser::parse; + // use super::*; + + // parse + // fn p(search: &str) -> Node { + // Node::Group(parse(search).unwrap()) + // } + + // get sql + // fn s<'a>(n: &'a Node) -> (String, Vec>) { + // node_to_sql(n) + // } + + #[test] + fn tosql() -> Result<(), String> { + // assert_eq!(s(&p("added:1")), ("(c.id > 1)".into(), vec![])); + + Ok(()) + } +}
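
Note (not part of the diff): a minimal usage sketch of the new search binding, under stated assumptions. It assumes an open collection created via pylib's anki.storage.Collection factory, that the collection exposes the Rust backend as `col.backend` (as rsbackend.py is used elsewhere in pylib), and a hypothetical collection path. Only `search_cards()` itself comes from this patch.

    from anki.storage import Collection

    col = Collection("/path/to/collection.anki2")   # hypothetical path

    # search_cards() wraps the query in a SearchCardsIn message, sends it over the
    # protobuf bridge, and returns the matching card ids. Ordering is not implemented
    # yet, so the ids come back in the fixed "order by c.id" order used in cards.rs.
    cids = col.backend.search_cards("deck:current")
    print(len(cids), "cards matched")

    col.close()

On the Rust side the call goes through Backend::search_cards to search_cards() in rslib/src/search/cards.rs, which parses the query with parser.rs and hands the resulting nodes to sqlwriter.rs to build the SQL and its arguments.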