mirror of https://github.com/ankitects/anki.git, synced 2025-09-18 22:12:21 -04:00

add a builder for Collection
parent 283776d8e7
commit cb07b232d8
8 changed files with 130 additions and 130 deletions
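At a glance, opening a collection moves from a positional free function to a builder. The snippet below is an illustrative sketch only, not text taken from the diff; the variables are placeholders:

    // before: every argument is positional and required
    let col = open_collection(col_path, media_folder, media_db, false, tr, log)?;

    // after: only the collection path is required; the rest is optional
    let col = CollectionBuilder::new(col_path)
        .set_media_paths(media_folder, media_db)
        .set_tr(tr)
        .build()?;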
@@ -8,8 +8,8 @@ pub(super) use crate::backend_proto::collection_service::Service as CollectionSe
 use crate::{
     backend::progress::progress_to_proto,
     backend_proto as pb,
-    collection::open_collection,
-    log::{self, default_logger},
+    collection::CollectionBuilder,
+    log::{self},
     prelude::*,
 };

@@ -30,25 +30,16 @@ impl CollectionService for Backend {
             return Err(AnkiError::CollectionAlreadyOpen);
         }

-        let mut path = input.collection_path.clone();
-        path.push_str(".log");
-
-        let log_path = match input.log_path.as_str() {
-            "" => None,
-            path => Some(path),
-        };
-        let logger = default_logger(log_path)?;
-
-        let new_col = open_collection(
-            input.collection_path,
-            input.media_folder_path,
-            input.media_db_path,
-            self.server,
-            self.tr.clone(),
-            logger,
-        )?;
-
-        *col = Some(new_col);
+        let mut builder = CollectionBuilder::new(input.collection_path);
+        builder
+            .set_media_paths(input.media_folder_path, input.media_db_path)
+            .set_server(self.server)
+            .set_tr(self.tr.clone());
+        if !input.log_path.is_empty() {
+            builder.set_log_file(&input.log_path)?;
+        }
+
+        *col = Some(builder.build()?);

         Ok(().into())
     }
@@ -12,7 +12,6 @@ use super::{progress::AbortHandleSlot, Backend};
 pub(super) use crate::backend_proto::sync_service::Service as SyncService;
 use crate::{
     backend_proto as pb,
-    collection::open_collection,
     media::MediaManager,
     prelude::*,
     sync::{
@@ -329,10 +328,7 @@ impl Backend {

         let (_guard, abort_reg) = self.sync_abort_handle()?;

-        let col_path = col_inner.col_path.clone();
-        let media_folder_path = col_inner.media_folder.clone();
-        let media_db_path = col_inner.media_db.clone();
-        let logger = col_inner.log.clone();
+        let builder = col_inner.as_builder();

         let mut handler = self.new_progress_handler();
         let progress_fn = move |progress: FullSyncProgress, throttle: bool| {
@@ -350,14 +346,7 @@ impl Backend {
         };

         // ensure re-opened regardless of outcome
-        col.replace(open_collection(
-            col_path,
-            media_folder_path,
-            media_db_path,
-            self.server,
-            self.tr.clone(),
-            logger,
-        )?);
+        col.replace(builder.build()?);

         match result {
             Ok(sync_result) => {
@@ -5,18 +5,14 @@ pub(crate) mod timestamps;
 mod transact;
 pub(crate) mod undo;

-use std::{
-    collections::HashMap,
-    path::{Path, PathBuf},
-    sync::Arc,
-};
+use std::{collections::HashMap, path::PathBuf, sync::Arc};

 use crate::{
     browser_table,
     decks::{Deck, DeckId},
     error::Result,
     i18n::I18n,
-    log::Logger,
+    log::{default_logger, Logger},
     notetype::{Notetype, NotetypeId},
     scheduler::{queue::CardQueues, SchedulerInfo},
     storage::SqliteStorage,
@@ -24,22 +20,42 @@ use crate::{
     undo::UndoManager,
 };

-pub fn open_collection<P: Into<PathBuf>>(
-    path: P,
-    media_folder: P,
-    media_db: P,
-    server: bool,
-    tr: I18n,
-    log: Logger,
-) -> Result<Collection> {
-    let col_path = path.into();
-    let storage = SqliteStorage::open_or_create(&col_path, &tr, server)?;
-
+#[derive(Default)]
+pub struct CollectionBuilder {
+    collection_path: Option<PathBuf>,
+    media_folder: Option<PathBuf>,
+    media_db: Option<PathBuf>,
+    server: Option<bool>,
+    tr: Option<I18n>,
+    log: Option<Logger>,
+}
+
+impl CollectionBuilder {
+    /// Create a new builder with the provided collection path.
+    /// If an in-memory database is desired, used ::default() instead.
+    pub fn new(col_path: impl Into<PathBuf>) -> Self {
+        let mut builder = Self::default();
+        builder.set_collection_path(col_path);
+        builder
+    }
+
+    pub fn build(&self) -> Result<Collection> {
+        let col_path = self
+            .collection_path
+            .clone()
+            .unwrap_or_else(|| PathBuf::from(":memory:"));
+        let tr = self.tr.clone().unwrap_or_else(I18n::template_only);
+        let server = self.server.unwrap_or_default();
+        let media_folder = self.media_folder.clone().unwrap_or_default();
+        let media_db = self.media_db.clone().unwrap_or_default();
+        let log = self.log.clone().unwrap_or_else(crate::log::terminal);
+
+        let storage = SqliteStorage::open_or_create(&col_path, &tr, server)?;
         let col = Collection {
             storage,
             col_path,
-        media_folder: media_folder.into(),
-        media_db: media_db.into(),
+            media_folder,
+            media_db,
             tr,
             log,
             server,
@@ -47,35 +63,45 @@ pub fn open_collection<P: Into<PathBuf>>(
         };

         Ok(col)
     }

-// We need to make a Builder for Collection in the future.
+    pub fn set_collection_path<P: Into<PathBuf>>(&mut self, collection: P) -> &mut Self {
+        self.collection_path = Some(collection.into());
+        self
+    }
+
+    pub fn set_media_paths<P: Into<PathBuf>>(&mut self, media_folder: P, media_db: P) -> &mut Self {
+        self.media_folder = Some(media_folder.into());
+        self.media_db = Some(media_db.into());
+        self
+    }
+
+    pub fn set_server(&mut self, server: bool) -> &mut Self {
+        self.server = Some(server);
+        self
+    }
+
+    pub fn set_tr(&mut self, tr: I18n) -> &mut Self {
+        self.tr = Some(tr);
+        self
+    }
+
+    /// Directly set the logger.
+    pub fn set_logger(&mut self, log: Logger) -> &mut Self {
+        self.log = Some(log);
+        self
+    }
+
+    /// Log to the provided file.
+    pub fn set_log_file(&mut self, log_file: &str) -> Result<&mut Self, std::io::Error> {
+        self.set_logger(default_logger(Some(log_file))?);
+        Ok(self)
+    }
+}

 #[cfg(test)]
 pub fn open_test_collection() -> Collection {
-    use crate::config::SchedulerVersion;
-
-    let mut col = open_test_collection_with_server(false);
-    // our unit tests assume v2 is the default, but at the time of writing v1
-    // is still the default
-    col.set_scheduler_version_config_key(SchedulerVersion::V2)
-        .unwrap();
-    col
-}
-
-#[cfg(test)]
-pub fn open_test_collection_with_server(server: bool) -> Collection {
-    use crate::log;
-    let tr = I18n::template_only();
-    open_collection(":memory:", "", "", server, tr, log::terminal()).unwrap()
-}
-
-/// Helper used by syncing to make sure the file can be opened. This should be replaced
-/// with a builder in the future.
-pub(crate) fn open_and_check_collection(col_path: &Path) -> Result<Collection> {
-    use crate::log;
-    let tr = I18n::template_only();
-    let empty = Path::new("");
-    open_collection(col_path, empty, empty, true, tr, log::terminal())
+    CollectionBuilder::default().build().unwrap()
 }

 #[derive(Debug, Default)]
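To illustrate the defaults applied in build() above, here is a minimal sketch written as if inside rslib; the function names and paths are invented for illustration and are not part of the commit:

    use crate::collection::{Collection, CollectionBuilder};
    use crate::error::Result;

    // An unconfigured builder falls back to an in-memory database,
    // template-only i18n, and a terminal logger.
    fn open_scratch_collection() -> Result<Collection> {
        CollectionBuilder::default().build()
    }

    // An on-disk collection with explicit media locations (placeholder paths).
    fn open_on_disk() -> Result<Collection> {
        let mut builder = CollectionBuilder::new("/path/to/collection.anki2");
        builder.set_media_paths("/path/to/collection.media", "/path/to/collection.media.db");
        builder.build()
    }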
@@ -104,6 +130,16 @@ pub struct Collection {
 }

 impl Collection {
+    pub fn as_builder(&self) -> CollectionBuilder {
+        let mut builder = CollectionBuilder::new(&self.col_path);
+        builder
+            .set_media_paths(self.media_folder.clone(), self.media_db.clone())
+            .set_server(self.server)
+            .set_tr(self.tr.clone())
+            .set_logger(self.log.clone());
+        builder
+    }
+
     pub(crate) fn close(self, downgrade: bool) -> Result<()> {
         self.storage.close(downgrade)
     }
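as_builder() is what lets the full-sync code earlier in this diff drop the open collection and later re-create an equivalent one. A hedged sketch of that round trip, written as if inside the crate (close() is pub(crate)); the helper name is invented:

    use crate::collection::Collection;
    use crate::error::Result;

    // Capture the collection's settings, close it, then reopen it.
    fn reopen(col: Collection) -> Result<Collection> {
        let builder = col.as_builder();
        col.close(false)?; // downgrade = false
        builder.build()
    }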
@@ -5,8 +5,8 @@ use serde_json::json;

 /// These items are expected to exist in schema 11. When adding
 /// new config variables, you do not need to add them here -
-/// just create an accessor function below with an appropriate
-/// default on missing/invalid values instead.
+/// just create an accessor function in one of the config/*.rs files,
+/// with an appropriate default for missing/invalid values instead.
 pub(crate) fn schema11_config_as_string(creation_offset: Option<i32>) -> String {
     let obj = json!({
         "activeDecks": [1],
@@ -8,6 +8,8 @@ use std::{
     path::Path,
 };

+use anki_i18n::without_unicode_isolation;
+
 use crate::{
     collection::Collection,
     error::{AnkiError, DbErrorKind, Result},
@@ -25,8 +27,6 @@ use crate::{
     text::{extract_media_refs, normalize_to_nfc, MediaRef, REMOTE_FILENAME},
 };

-use anki_i18n::without_unicode_isolation;
-
 #[derive(Debug, PartialEq, Clone)]
 pub struct MediaCheckOutput {
     pub unused: Vec<String>,
@@ -518,10 +518,8 @@ pub(crate) mod test {

     use super::normalize_and_maybe_rename_files;
     use crate::{
-        collection::{open_collection, Collection},
+        collection::{Collection, CollectionBuilder},
         error::Result,
-        i18n::I18n,
-        log,
         media::{
             check::{MediaCheckOutput, MediaChecker},
             files::trash_folder,
@@ -531,18 +529,16 @@ pub(crate) mod test {

     fn common_setup() -> Result<(TempDir, MediaManager, Collection)> {
         let dir = tempdir()?;
-        let media_dir = dir.path().join("media");
-        fs::create_dir(&media_dir)?;
+        let media_folder = dir.path().join("media");
+        fs::create_dir(&media_folder)?;
         let media_db = dir.path().join("media.db");
         let col_path = dir.path().join("col.anki2");
         fs::write(&col_path, MEDIACHECK_ANKI2)?;

-        let mgr = MediaManager::new(&media_dir, media_db.clone())?;
-        let log = log::terminal();
-        let tr = I18n::template_only();
-
-        let col = open_collection(col_path, media_dir, media_db, false, tr, log)?;
+        let mgr = MediaManager::new(&media_folder, media_db.clone())?;
+        let col = CollectionBuilder::new(col_path)
+            .set_media_paths(media_folder, media_db)
+            .build()?;

         Ok((dir, mgr, col))
     }
@@ -610,16 +610,12 @@ impl SearchNode {

 #[cfg(test)]
 mod test {
-    use std::{fs, path::PathBuf};
+    use std::fs;

     use tempfile::tempdir;

     use super::{super::parser::parse, *};
-    use crate::{
-        collection::{open_collection, Collection},
-        i18n::I18n,
-        log,
-    };
+    use crate::collection::{Collection, CollectionBuilder};

     // shortcut
     fn s(req: &mut Collection, search: &str) -> (String, Vec<String>) {
@@ -638,17 +634,7 @@ mod test {
         let col_path = dir.path().join("col.anki2");
         fs::write(&col_path, MEDIACHECK_ANKI2).unwrap();

-        let tr = I18n::template_only();
-        let mut col = open_collection(
-            &col_path,
-            &PathBuf::new(),
-            &PathBuf::new(),
-            false,
-            tr,
-            log::terminal(),
-        )
-        .unwrap();
-
+        let mut col = CollectionBuilder::new(col_path).build().unwrap();
         let ctx = &mut col;

         // unqualified search
@@ -1199,7 +1199,7 @@ mod test {

     use super::{server::LocalServer, *};
     use crate::{
-        collection::open_collection, deckconfig::DeckConfig, decks::DeckKind, i18n::I18n, log,
+        collection::CollectionBuilder, deckconfig::DeckConfig, decks::DeckKind,
         notetype::all_stock_notetypes, search::SortMode,
     };

@@ -1225,22 +1225,20 @@ mod test {
         rt.block_on(regular_sync(&ctx))
     }

-    fn open_col(dir: &Path, server: bool, fname: &str) -> Result<Collection> {
-        let path = dir.join(fname);
-        let tr = I18n::template_only();
-        open_collection(path, "".into(), "".into(), server, tr, log::terminal())
-    }
-
     #[async_trait(?Send)]
     trait TestContext {
         fn server(&self) -> Box<dyn SyncServer>;

         fn col1(&self) -> Collection {
-            open_col(self.dir(), false, "col1.anki2").unwrap()
+            CollectionBuilder::new(self.dir().join("col1.anki2"))
+                .build()
+                .unwrap()
         }

         fn col2(&self) -> Collection {
-            open_col(self.dir(), false, "col2.anki2").unwrap()
+            CollectionBuilder::new(self.dir().join("col2.anki2"))
+                .build()
+                .unwrap()
         }

         fn dir(&self) -> &Path {
|
||||||
#[async_trait(?Send)]
|
#[async_trait(?Send)]
|
||||||
impl TestContext for LocalTestContext {
|
impl TestContext for LocalTestContext {
|
||||||
fn server(&self) -> Box<dyn SyncServer> {
|
fn server(&self) -> Box<dyn SyncServer> {
|
||||||
let col = open_col(self.dir(), true, "server.anki2").unwrap();
|
let col_path = self.dir().join("server.anki2");
|
||||||
|
let col = CollectionBuilder::new(col_path)
|
||||||
|
.set_server(true)
|
||||||
|
.build()
|
||||||
|
.unwrap();
|
||||||
Box::new(LocalServer::new(col))
|
Box::new(LocalServer::new(col))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -8,7 +8,7 @@ use tempfile::NamedTempFile;

 use super::ChunkableIds;
 use crate::{
-    collection::open_and_check_collection,
+    collection::CollectionBuilder,
     prelude::*,
     storage::open_and_check_sqlite_file,
     sync::{
@@ -200,7 +200,7 @@ impl SyncServer for LocalServer {

         // ensure it's a valid sqlite file, and a valid collection
         open_and_check_sqlite_file(col_path)
-            .and_then(|_| open_and_check_collection(col_path))
+            .and_then(|_| CollectionBuilder::new(col_path).build())
            .map_err(|check_err| match fs::remove_file(col_path) {
                Ok(_) => check_err,
                Err(remove_err) => remove_err.into(),