Mirror of https://github.com/ankitects/anki.git

Commit cc297f1dc6 (parent da29d0fcae): move note db code to storage

11 changed files with 156 additions and 110 deletions
@@ -24,7 +24,7 @@ use crate::{
     notetype::{all_stock_notetypes, NoteTypeID, NoteTypeSchema11},
     sched::cutoff::{local_minutes_west_for_stamp, sched_timing_today},
     sched::timespan::{answer_button_time, learning_congrats, studied_today, time_span},
-    search::{search_cards, search_notes, SortMode},
+    search::{search_cards, SortMode},
     template::{
        render_card, without_legacy_template_directives, FieldMap, FieldRequirements,
        ParsedTemplate, RenderedNode,
@@ -705,7 +705,7 @@ impl Backend {
     fn search_notes(&self, input: pb::SearchNotesIn) -> Result<pb::SearchNotesOut> {
         self.with_col(|col| {
-            let nids = search_notes(col, &input.search)?;
+            let nids = col.search_notes(&input.search)?;
             Ok(pb::SearchNotesOut {
                 note_ids: nids.into_iter().map(|v| v.0).collect(),
             })
@@ -11,7 +11,7 @@ use crate::media::files::{
     data_for_file, filename_if_normalized, normalize_nfc_filename, trash_folder,
     MEDIA_SYNC_FILESIZE_LIMIT,
 };
-use crate::notes::{for_every_note, set_note, Note};
+use crate::notes::Note;
 use crate::text::{normalize_to_nfc, MediaRef};
 use crate::{media::MediaManager, text::extract_media_refs};
 use coarsetime::Instant;
@@ -44,26 +44,26 @@ struct MediaFolderCheck {
     oversize: Vec<String>,
 }
 
-pub struct MediaChecker<'a, P>
+pub struct MediaChecker<'a, 'b, P>
 where
     P: FnMut(usize) -> bool,
 {
-    ctx: &'a Collection,
-    mgr: &'a MediaManager,
+    ctx: &'a mut Collection,
+    mgr: &'b MediaManager,
     progress_cb: P,
     checked: usize,
     progress_updated: Instant,
 }
 
-impl<P> MediaChecker<'_, P>
+impl<P> MediaChecker<'_, '_, P>
 where
     P: FnMut(usize) -> bool,
 {
-    pub(crate) fn new<'a>(
+    pub(crate) fn new<'a, 'b>(
         ctx: &'a mut Collection,
-        mgr: &'a MediaManager,
+        mgr: &'b MediaManager,
         progress_cb: P,
-    ) -> MediaChecker<'a, P> {
+    ) -> MediaChecker<'a, 'b, P> {
         MediaChecker {
             ctx,
             mgr,
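The second lifetime parameter above lets the checker borrow the Collection mutably and the MediaManager immutably for independent durations, rather than tying both borrows to a single 'a. A minimal standalone sketch of that pattern, using placeholder types (Checker, a Vec and a &str) instead of Anki's Collection/MediaManager:

    // Placeholder types; only the lifetime structure mirrors MediaChecker.
    struct Checker<'a, 'b> {
        col: &'a mut Vec<String>, // stands in for &'a mut Collection
        media_dir: &'b str,       // stands in for &'b MediaManager
    }

    impl<'a, 'b> Checker<'a, 'b> {
        fn new(col: &'a mut Vec<String>, media_dir: &'b str) -> Checker<'a, 'b> {
            Checker { col, media_dir }
        }

        fn check(&mut self) {
            // mutate the collection while only reading the manager
            self.col.push(format!("checked {}", self.media_dir));
        }
    }

    fn main() {
        let media_dir = String::from("collection.media");
        let mut col = Vec::new();
        let mut checker = Checker::new(&mut col, &media_dir);
        checker.check();
        assert_eq!(col.len(), 1);
    }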
@@ -383,11 +383,14 @@ where
         let note_types = self.ctx.storage.get_all_notetype_core()?;
         let mut collection_modified = false;
 
-        for_every_note(&self.ctx.storage.db, |note| {
+        let nids = self.ctx.search_notes("")?;
+        let usn = self.ctx.usn()?;
+        for nid in nids {
             self.checked += 1;
             if self.checked % 10 == 0 {
                 self.maybe_fire_progress_cb()?;
             }
+            let mut note = self.ctx.storage.get_note(nid)?.unwrap();
             let nt = note_types
                 .get(&note.ntid)
                 .ok_or_else(|| AnkiError::DBError {
@@ -395,24 +398,25 @@ where
                     kind: DBErrorKind::MissingEntity,
                 })?;
             if fix_and_extract_media_refs(
-                note,
+                &mut note,
                 &mut referenced_files,
                 renamed,
                 &self.mgr.media_folder,
             )? {
                 // note was modified, needs saving
-                set_note(
-                    &self.ctx.storage.db,
-                    note,
-                    nt.config.sort_field_idx as usize,
-                )?;
+                note.prepare_for_update(nt.config.sort_field_idx as usize, usn);
+                self.ctx.storage.update_note(&note)?;
                 collection_modified = true;
             }
 
             // extract latex
-            extract_latex_refs(note, &mut referenced_files, nt.config.latex_svg);
-            Ok(())
-        })?;
+            extract_latex_refs(&note, &mut referenced_files, nt.config.latex_svg);
+        }
+
+        if collection_modified {
+            // fixme: need to refactor to use new transaction handling?
+            // self.ctx.storage.commit_trx()?;
+        }
 
         Ok(referenced_files)
     }
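Taken together with the previous hunk, the check now loads note ids up front and round-trips each note through storage, instead of passing a closure over a raw DB handle. A self-contained mock of that shape, with HashMap-backed stand-ins (Note, Storage) rather than the real Collection/SqliteStorage API:

    use std::collections::HashMap;

    // Stand-ins that only mirror the shape of the new loop.
    #[derive(Clone)]
    struct Note {
        id: i64,
        fields: Vec<String>,
    }

    struct Storage {
        notes: HashMap<i64, Note>,
    }

    impl Storage {
        fn get_note(&self, id: i64) -> Option<Note> {
            self.notes.get(&id).cloned()
        }
        fn update_note(&mut self, note: &Note) {
            self.notes.insert(note.id, note.clone());
        }
    }

    fn main() {
        let mut storage = Storage {
            notes: HashMap::from([(1, Note { id: 1, fields: vec!["<img src=a.jpg>".into()] })]),
        };
        // 1. collect the ids first (the real code uses self.ctx.search_notes(""))
        let nids: Vec<i64> = storage.notes.keys().copied().collect();
        // 2. load, fix, and write back each note individually
        for nid in nids {
            let mut note = storage.get_note(nid).expect("note exists");
            if note.fields[0].contains("a.jpg") {
                note.fields[0] = note.fields[0].replace("a.jpg", "a_renamed.jpg");
                storage.update_note(&note); // the real code calls prepare_for_update() first
            }
        }
        assert!(storage.notes[&1].fields[0].contains("a_renamed.jpg"));
    }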
@@ -1,25 +1,28 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-/// At the moment, this is just basic note reading/updating functionality for
-/// the media DB check.
-use crate::err::{AnkiError, DBErrorKind, Result};
+use crate::err::{AnkiError, Result};
 use crate::notetype::NoteTypeID;
 use crate::text::strip_html_preserving_image_filenames;
 use crate::timestamp::TimestampSecs;
 use crate::{define_newtype, types::Usn};
-use rusqlite::{params, Connection, Row, NO_PARAMS};
 use std::convert::TryInto;
 
 define_newtype!(NoteID, i64);
 
+// fixme: ensure nulls and x1f not in field contents
+
 #[derive(Debug)]
-pub(super) struct Note {
+pub struct Note {
     pub id: NoteID,
+    pub guid: String,
     pub ntid: NoteTypeID,
     pub mtime: TimestampSecs,
     pub usn: Usn,
-    fields: Vec<String>,
+    pub tags: Vec<String>,
+    pub fields: Vec<String>,
+    pub(crate) sort_field: Option<String>,
+    pub(crate) checksum: Option<u32>,
 }
 
 impl Note {
@@ -38,6 +41,26 @@ impl Note {
         Ok(())
     }
 
+    pub fn prepare_for_update(&mut self, sort_field_idx: usize, usn: Usn) {
+        let field1_nohtml = strip_html_preserving_image_filenames(&self.fields()[0]);
+        let checksum = field_checksum(field1_nohtml.as_ref());
+        let sort_field = if sort_field_idx == 0 {
+            field1_nohtml
+        } else {
+            strip_html_preserving_image_filenames(
+                self.fields
+                    .get(sort_field_idx)
+                    .map(AsRef::as_ref)
+                    .unwrap_or(""),
+            )
+        };
+        self.sort_field = Some(sort_field.into());
+        self.checksum = Some(checksum);
+        self.mtime = TimestampSecs::now();
+        // hard-coded for now
+        self.usn = usn;
+    }
 }
 
 /// Text must be passed to strip_html_preserving_image_filenames() by
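One behavioural detail of prepare_for_update() worth noting: an out-of-range sort_field_idx now falls back to an empty sort field, whereas the old set_note() (removed in the next hunk) returned a MissingEntity DBError. A standalone sketch of that selection logic, with a trivial stand-in (strip_html) for strip_html_preserving_image_filenames() and a hypothetical helper name (sort_field_for):

    // Stand-in for strip_html_preserving_image_filenames(); the real function
    // strips HTML while keeping image filenames.
    fn strip_html(text: &str) -> String {
        text.replace("<b>", "").replace("</b>", "")
    }

    // Same selection logic as prepare_for_update() above.
    fn sort_field_for(fields: &[String], sort_field_idx: usize) -> String {
        if sort_field_idx == 0 {
            strip_html(&fields[0])
        } else {
            strip_html(fields.get(sort_field_idx).map(AsRef::as_ref).unwrap_or(""))
        }
    }

    fn main() {
        let fields = vec!["<b>Front</b>".to_string(), "Back".to_string()];
        assert_eq!(sort_field_for(&fields, 1), "Back");
        // out of range: empty sort field instead of a MissingEntity error
        assert_eq!(sort_field_for(&fields, 5), "");
    }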
@@ -46,71 +69,3 @@ pub(crate) fn field_checksum(text: &str) -> u32 {
     let digest = sha1::Sha1::from(text).digest().bytes();
     u32::from_be_bytes(digest[..4].try_into().unwrap())
 }
-
-#[allow(dead_code)]
-fn get_note(db: &Connection, nid: NoteID) -> Result<Option<Note>> {
-    let mut stmt = db.prepare_cached("select id, mid, mod, usn, flds from notes where id=?")?;
-    let note = stmt.query_and_then(params![nid], row_to_note)?.next();
-
-    note.transpose()
-}
-
-pub(super) fn for_every_note<F: FnMut(&mut Note) -> Result<()>>(
-    db: &Connection,
-    mut func: F,
-) -> Result<()> {
-    let mut stmt = db.prepare("select id, mid, mod, usn, flds from notes")?;
-    for result in stmt.query_and_then(NO_PARAMS, |row| {
-        let mut note = row_to_note(row)?;
-        func(&mut note)
-    })? {
-        result?;
-    }
-    Ok(())
-}
-
-fn row_to_note(row: &Row) -> Result<Note> {
-    Ok(Note {
-        id: row.get(0)?,
-        ntid: row.get(1)?,
-        mtime: row.get(2)?,
-        usn: row.get(3)?,
-        fields: row
-            .get_raw(4)
-            .as_str()?
-            .split('\x1f')
-            .map(|s| s.to_string())
-            .collect(),
-    })
-}
-
-pub(super) fn set_note(db: &Connection, note: &mut Note, sort_field_idx: usize) -> Result<()> {
-    note.mtime = TimestampSecs::now();
-    // hard-coded for now
-    note.usn = Usn(-1);
-    let field1_nohtml = strip_html_preserving_image_filenames(&note.fields()[0]);
-    let csum = field_checksum(field1_nohtml.as_ref());
-    let sort_field = if sort_field_idx == 0 {
-        field1_nohtml
-    } else {
-        strip_html_preserving_image_filenames(note.fields().get(sort_field_idx).ok_or_else(
-            || AnkiError::DBError {
-                info: "sort field out of range".to_string(),
-                kind: DBErrorKind::MissingEntity,
-            },
-        )?)
-    };
-
-    let mut stmt =
-        db.prepare_cached("update notes set mod=?,usn=?,flds=?,sfld=?,csum=? where id=?")?;
-    stmt.execute(params![
-        note.mtime,
-        note.usn,
-        note.fields().join("\x1f"),
-        sort_field,
-        csum,
-        note.id,
-    ])?;
-
-    Ok(())
-}
@@ -4,4 +4,3 @@ mod parser;
 mod sqlwriter;
 
 pub(crate) use cards::{search_cards, SortMode};
-pub(crate) use notes::search_notes;
@@ -7,19 +7,21 @@ use crate::err::Result;
 use crate::notes::NoteID;
 use crate::search::parser::parse;
 
-pub(crate) fn search_notes<'a>(req: &'a mut Collection, search: &'a str) -> Result<Vec<NoteID>> {
-    let top_node = Node::Group(parse(search)?);
-    let (sql, args) = node_to_sql(req, &top_node)?;
+impl Collection {
+    pub(crate) fn search_notes(&mut self, search: &str) -> Result<Vec<NoteID>> {
+        let top_node = Node::Group(parse(search)?);
+        let (sql, args) = node_to_sql(self, &top_node)?;
 
     let sql = format!(
         "select n.id from cards c, notes n where c.nid=n.id and {}",
         sql
     );
 
-    let mut stmt = req.storage.db.prepare(&sql)?;
+    let mut stmt = self.storage.db.prepare(&sql)?;
     let ids: Vec<_> = stmt
         .query_map(&args, |row| row.get(0))?
         .collect::<std::result::Result<_, _>>()?;
 
     Ok(ids)
+    }
 }
@@ -1,5 +1,3 @@
--- the casts are required as Anki didn't prevent add-ons from
--- storing strings or floats in columns before
 select
   nid,
   did,
@@ -5,6 +5,7 @@ mod card;
 mod config;
 mod deck;
 mod deckconf;
+mod note;
 mod notetype;
 mod sqlite;
 mod tag;
rslib/src/storage/note/get.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+select
+  guid,
+  mid,
+  mod,
+  usn,
+  tags,
+  flds
+from notes
+where
+  id = ?
rslib/src/storage/note/mod.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use crate::{
+    err::Result,
+    notes::{Note, NoteID},
+    tags::{join_tags, split_tags},
+};
+use rusqlite::{params, OptionalExtension};
+
+fn split_fields(fields: &str) -> Vec<String> {
+    fields.split('\x1f').map(Into::into).collect()
+}
+
+fn join_fields(fields: &[String]) -> String {
+    fields.join("\x1f")
+}
+
+impl super::SqliteStorage {
+    pub fn get_note(&self, nid: NoteID) -> Result<Option<Note>> {
+        let mut stmt = self.db.prepare_cached(include_str!("get.sql"))?;
+        stmt.query_row(params![nid], |row| {
+            Ok(Note {
+                id: nid,
+                guid: row.get(0)?,
+                ntid: row.get(1)?,
+                mtime: row.get(2)?,
+                usn: row.get(3)?,
+                tags: split_tags(row.get_raw(4).as_str()?)
+                    .map(Into::into)
+                    .collect(),
+                fields: split_fields(row.get_raw(5).as_str()?),
+                sort_field: None,
+                checksum: None,
+            })
+        })
+        .optional()
+        .map_err(Into::into)
+    }
+
+    /// Caller must call note.prepare_for_update() prior to calling this.
+    pub(crate) fn update_note(&self, note: &Note) -> Result<()> {
+        let mut stmt = self.db.prepare_cached(include_str!("update.sql"))?;
+        stmt.execute(params![
+            note.guid,
+            note.ntid,
+            note.mtime,
+            note.usn,
+            join_tags(&note.tags),
+            join_fields(&note.fields),
+            note.sort_field.as_ref().unwrap(),
+            note.checksum.unwrap(),
+            note.id
+        ])?;
+        Ok(())
+    }
+}
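The two private helpers at the top of the new module encode the flds column format: fields joined with the 0x1f unit separator. A quick round-trip check using copies of those helpers, which also shows why the "fixme: ensure nulls and x1f not in field contents" note earlier in this commit matters:

    // Copies of the private helpers from storage/note/mod.rs above.
    fn split_fields(fields: &str) -> Vec<String> {
        fields.split('\x1f').map(Into::into).collect()
    }

    fn join_fields(fields: &[String]) -> String {
        fields.join("\x1f")
    }

    fn main() {
        let fields = vec!["Front text".to_string(), "Back text".to_string()];
        let flds = join_fields(&fields);
        assert_eq!(split_fields(&flds), fields);

        // a stray 0x1f inside a field would change the field count on re-read
        let bad = vec!["Front\x1ftext".to_string()];
        assert_eq!(split_fields(&join_fields(&bad)).len(), 2);
    }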
rslib/src/storage/note/update.sql (new file, 12 lines)
@@ -0,0 +1,12 @@
+update notes
+set
+  guid = ?,
+  mid = ?,
+  mod = ?,
+  usn = ?,
+  tags = ?,
+  flds = ?,
+  sfld = ?,
+  csum = ?
+where
+  id = ?
@@ -7,11 +7,19 @@ use crate::types::Usn;
 use std::{borrow::Cow, collections::HashSet};
 use unicase::UniCase;
 
-fn split_tags(tags: &str) -> impl Iterator<Item = &str> {
+pub(crate) fn split_tags(tags: &str) -> impl Iterator<Item = &str> {
     tags.split(|c| c == ' ' || c == '\u{3000}')
         .filter(|tag| !tag.is_empty())
 }
 
+pub(crate) fn join_tags(tags: &[String]) -> String {
+    if tags.is_empty() {
+        "".into()
+    } else {
+        format!(" {} ", tags.join(" "))
+    }
+}
+
 impl Collection {
     /// Given a space-separated list of tags, fix case, ordering and duplicates.
     /// Returns true if any new tags were added.
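For reference, join_tags() writes a non-empty tag list with leading and trailing spaces, presumably matching the existing notes.tags column format, and split_tags() discards the empty pieces on the way back in. A small standalone round trip using copies of the two functions:

    // Copies of the tag helpers added above.
    fn split_tags(tags: &str) -> impl Iterator<Item = &str> {
        tags.split(|c| c == ' ' || c == '\u{3000}')
            .filter(|tag| !tag.is_empty())
    }

    fn join_tags(tags: &[String]) -> String {
        if tags.is_empty() {
            "".into()
        } else {
            format!(" {} ", tags.join(" "))
        }
    }

    fn main() {
        let tags = vec!["marine".to_string(), "vocab".to_string()];
        let stored = join_tags(&tags);
        assert_eq!(stored, " marine vocab ");
        let back: Vec<&str> = split_tags(&stored).collect();
        assert_eq!(back, ["marine", "vocab"]);
        assert_eq!(join_tags(&[]), "");
    }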