diff --git a/rslib/src/backend/collection.rs b/rslib/src/backend/collection.rs index 206efe9dc..a92ffa11b 100644 --- a/rslib/src/backend/collection.rs +++ b/rslib/src/backend/collection.rs @@ -8,8 +8,8 @@ pub(super) use crate::backend_proto::collection_service::Service as CollectionSe use crate::{ backend::progress::progress_to_proto, backend_proto as pb, - collection::open_collection, - log::{self, default_logger}, + collection::CollectionBuilder, + log::{self}, prelude::*, }; @@ -30,25 +30,16 @@ impl CollectionService for Backend { return Err(AnkiError::CollectionAlreadyOpen); } - let mut path = input.collection_path.clone(); - path.push_str(".log"); + let mut builder = CollectionBuilder::new(input.collection_path); + builder + .set_media_paths(input.media_folder_path, input.media_db_path) + .set_server(self.server) + .set_tr(self.tr.clone()); + if !input.log_path.is_empty() { + builder.set_log_file(&input.log_path)?; + } - let log_path = match input.log_path.as_str() { - "" => None, - path => Some(path), - }; - let logger = default_logger(log_path)?; - - let new_col = open_collection( - input.collection_path, - input.media_folder_path, - input.media_db_path, - self.server, - self.tr.clone(), - logger, - )?; - - *col = Some(new_col); + *col = Some(builder.build()?); Ok(().into()) } diff --git a/rslib/src/backend/sync/mod.rs b/rslib/src/backend/sync/mod.rs index ff72185d4..7c63e1eb5 100644 --- a/rslib/src/backend/sync/mod.rs +++ b/rslib/src/backend/sync/mod.rs @@ -12,7 +12,6 @@ use super::{progress::AbortHandleSlot, Backend}; pub(super) use crate::backend_proto::sync_service::Service as SyncService; use crate::{ backend_proto as pb, - collection::open_collection, media::MediaManager, prelude::*, sync::{ @@ -329,10 +328,7 @@ impl Backend { let (_guard, abort_reg) = self.sync_abort_handle()?; - let col_path = col_inner.col_path.clone(); - let media_folder_path = col_inner.media_folder.clone(); - let media_db_path = col_inner.media_db.clone(); - let logger = 
col_inner.log.clone(); + let builder = col_inner.as_builder(); let mut handler = self.new_progress_handler(); let progress_fn = move |progress: FullSyncProgress, throttle: bool| { @@ -350,14 +346,7 @@ impl Backend { }; // ensure re-opened regardless of outcome - col.replace(open_collection( - col_path, - media_folder_path, - media_db_path, - self.server, - self.tr.clone(), - logger, - )?); + col.replace(builder.build()?); match result { Ok(sync_result) => { diff --git a/rslib/src/collection/mod.rs b/rslib/src/collection/mod.rs index b3eba5e63..e991cea87 100644 --- a/rslib/src/collection/mod.rs +++ b/rslib/src/collection/mod.rs @@ -5,18 +5,14 @@ pub(crate) mod timestamps; mod transact; pub(crate) mod undo; -use std::{ - collections::HashMap, - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{collections::HashMap, path::PathBuf, sync::Arc}; use crate::{ browser_table, decks::{Deck, DeckId}, error::Result, i18n::I18n, - log::Logger, + log::{default_logger, Logger}, notetype::{Notetype, NotetypeId}, scheduler::{queue::CardQueues, SchedulerInfo}, storage::SqliteStorage, @@ -24,58 +20,88 @@ use crate::{ undo::UndoManager, }; -pub fn open_collection>( - path: P, - media_folder: P, - media_db: P, - server: bool, - tr: I18n, - log: Logger, -) -> Result { - let col_path = path.into(); - let storage = SqliteStorage::open_or_create(&col_path, &tr, server)?; - - let col = Collection { - storage, - col_path, - media_folder: media_folder.into(), - media_db: media_db.into(), - tr, - log, - server, - state: CollectionState::default(), - }; - - Ok(col) +#[derive(Default)] +pub struct CollectionBuilder { + collection_path: Option, + media_folder: Option, + media_db: Option, + server: Option, + tr: Option, + log: Option, } -// We need to make a Builder for Collection in the future. +impl CollectionBuilder { + /// Create a new builder with the provided collection path. + /// If an in-memory database is desired, use ::default() instead. 
+ pub fn new(col_path: impl Into) -> Self { + let mut builder = Self::default(); + builder.set_collection_path(col_path); + builder + } + + pub fn build(&self) -> Result { + let col_path = self + .collection_path + .clone() + .unwrap_or_else(|| PathBuf::from(":memory:")); + let tr = self.tr.clone().unwrap_or_else(I18n::template_only); + let server = self.server.unwrap_or_default(); + let media_folder = self.media_folder.clone().unwrap_or_default(); + let media_db = self.media_db.clone().unwrap_or_default(); + let log = self.log.clone().unwrap_or_else(crate::log::terminal); + + let storage = SqliteStorage::open_or_create(&col_path, &tr, server)?; + let col = Collection { + storage, + col_path, + media_folder, + media_db, + tr, + log, + server, + state: CollectionState::default(), + }; + + Ok(col) + } + + pub fn set_collection_path>(&mut self, collection: P) -> &mut Self { + self.collection_path = Some(collection.into()); + self + } + + pub fn set_media_paths>(&mut self, media_folder: P, media_db: P) -> &mut Self { + self.media_folder = Some(media_folder.into()); + self.media_db = Some(media_db.into()); + self + } + + pub fn set_server(&mut self, server: bool) -> &mut Self { + self.server = Some(server); + self + } + + pub fn set_tr(&mut self, tr: I18n) -> &mut Self { + self.tr = Some(tr); + self + } + + /// Directly set the logger. + pub fn set_logger(&mut self, log: Logger) -> &mut Self { + self.log = Some(log); + self + } + + /// Log to the provided file. 
+ pub fn set_log_file(&mut self, log_file: &str) -> Result<&mut Self, std::io::Error> { + self.set_logger(default_logger(Some(log_file))?); + Ok(self) + } +} #[cfg(test)] pub fn open_test_collection() -> Collection { - use crate::config::SchedulerVersion; - let mut col = open_test_collection_with_server(false); - // our unit tests assume v2 is the default, but at the time of writing v1 - // is still the default - col.set_scheduler_version_config_key(SchedulerVersion::V2) - .unwrap(); - col -} - -#[cfg(test)] -pub fn open_test_collection_with_server(server: bool) -> Collection { - use crate::log; - let tr = I18n::template_only(); - open_collection(":memory:", "", "", server, tr, log::terminal()).unwrap() -} - -/// Helper used by syncing to make sure the file can be opened. This should be replaced -/// with a builder in the future. -pub(crate) fn open_and_check_collection(col_path: &Path) -> Result { - use crate::log; - let tr = I18n::template_only(); - let empty = Path::new(""); - open_collection(col_path, empty, empty, true, tr, log::terminal()) + CollectionBuilder::default().build().unwrap() } #[derive(Debug, Default)] @@ -104,6 +130,16 @@ pub struct Collection { } impl Collection { + pub fn as_builder(&self) -> CollectionBuilder { + let mut builder = CollectionBuilder::new(&self.col_path); + builder + .set_media_paths(self.media_folder.clone(), self.media_db.clone()) + .set_server(self.server) + .set_tr(self.tr.clone()) + .set_logger(self.log.clone()); + builder + } + pub(crate) fn close(self, downgrade: bool) -> Result<()> { self.storage.close(downgrade) } diff --git a/rslib/src/config/schema11.rs b/rslib/src/config/schema11.rs index 90eac637e..cb3c9f451 100644 --- a/rslib/src/config/schema11.rs +++ b/rslib/src/config/schema11.rs @@ -5,8 +5,8 @@ use serde_json::json; /// These items are expected to exist in schema 11. 
When adding /// new config variables, you do not need to add them here - -/// just create an accessor function below with an appropriate -/// default on missing/invalid values instead. +/// just create an accessor function in one of the config/*.rs files, +/// with an appropriate default for missing/invalid values instead. pub(crate) fn schema11_config_as_string(creation_offset: Option) -> String { let obj = json!({ "activeDecks": [1], diff --git a/rslib/src/media/check.rs b/rslib/src/media/check.rs index d94203721..9f6d6554c 100644 --- a/rslib/src/media/check.rs +++ b/rslib/src/media/check.rs @@ -8,6 +8,8 @@ use std::{ path::Path, }; +use anki_i18n::without_unicode_isolation; + use crate::{ collection::Collection, error::{AnkiError, DbErrorKind, Result}, @@ -25,8 +27,6 @@ use crate::{ text::{extract_media_refs, normalize_to_nfc, MediaRef, REMOTE_FILENAME}, }; -use anki_i18n::without_unicode_isolation; - #[derive(Debug, PartialEq, Clone)] pub struct MediaCheckOutput { pub unused: Vec, @@ -518,10 +518,8 @@ pub(crate) mod test { use super::normalize_and_maybe_rename_files; use crate::{ - collection::{open_collection, Collection}, + collection::{Collection, CollectionBuilder}, error::Result, - i18n::I18n, - log, media::{ check::{MediaCheckOutput, MediaChecker}, files::trash_folder, @@ -531,18 +529,16 @@ pub(crate) mod test { fn common_setup() -> Result<(TempDir, MediaManager, Collection)> { let dir = tempdir()?; - let media_dir = dir.path().join("media"); - fs::create_dir(&media_dir)?; + let media_folder = dir.path().join("media"); + fs::create_dir(&media_folder)?; let media_db = dir.path().join("media.db"); let col_path = dir.path().join("col.anki2"); fs::write(&col_path, MEDIACHECK_ANKI2)?; - let mgr = MediaManager::new(&media_dir, media_db.clone())?; - - let log = log::terminal(); - let tr = I18n::template_only(); - - let col = open_collection(col_path, media_dir, media_db, false, tr, log)?; + let mgr = MediaManager::new(&media_folder, media_db.clone())?; + let col 
= CollectionBuilder::new(col_path) + .set_media_paths(media_folder, media_db) + .build()?; Ok((dir, mgr, col)) } diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs index 50c0d4827..b9165abea 100644 --- a/rslib/src/search/sqlwriter.rs +++ b/rslib/src/search/sqlwriter.rs @@ -610,16 +610,12 @@ impl SearchNode { #[cfg(test)] mod test { - use std::{fs, path::PathBuf}; + use std::fs; use tempfile::tempdir; use super::{super::parser::parse, *}; - use crate::{ - collection::{open_collection, Collection}, - i18n::I18n, - log, - }; + use crate::collection::{Collection, CollectionBuilder}; // shortcut fn s(req: &mut Collection, search: &str) -> (String, Vec) { @@ -638,17 +634,7 @@ mod test { let col_path = dir.path().join("col.anki2"); fs::write(&col_path, MEDIACHECK_ANKI2).unwrap(); - let tr = I18n::template_only(); - let mut col = open_collection( - &col_path, - &PathBuf::new(), - &PathBuf::new(), - false, - tr, - log::terminal(), - ) - .unwrap(); - + let mut col = CollectionBuilder::new(col_path).build().unwrap(); let ctx = &mut col; // unqualified search diff --git a/rslib/src/sync/mod.rs b/rslib/src/sync/mod.rs index e6c538326..8d77fa1ed 100644 --- a/rslib/src/sync/mod.rs +++ b/rslib/src/sync/mod.rs @@ -1199,7 +1199,7 @@ mod test { use super::{server::LocalServer, *}; use crate::{ - collection::open_collection, deckconfig::DeckConfig, decks::DeckKind, i18n::I18n, log, + collection::CollectionBuilder, deckconfig::DeckConfig, decks::DeckKind, notetype::all_stock_notetypes, search::SortMode, }; @@ -1225,22 +1225,20 @@ mod test { rt.block_on(regular_sync(&ctx)) } - fn open_col(dir: &Path, server: bool, fname: &str) -> Result { - let path = dir.join(fname); - let tr = I18n::template_only(); - open_collection(path, "".into(), "".into(), server, tr, log::terminal()) - } - #[async_trait(?Send)] trait TestContext { fn server(&self) -> Box; fn col1(&self) -> Collection { - open_col(self.dir(), false, "col1.anki2").unwrap() + 
CollectionBuilder::new(self.dir().join("col1.anki2")) + .build() + .unwrap() } fn col2(&self) -> Collection { - open_col(self.dir(), false, "col2.anki2").unwrap() + CollectionBuilder::new(self.dir().join("col2.anki2")) + .build() + .unwrap() } fn dir(&self) -> &Path { @@ -1274,7 +1272,11 @@ mod test { #[async_trait(?Send)] impl TestContext for LocalTestContext { fn server(&self) -> Box { - let col = open_col(self.dir(), true, "server.anki2").unwrap(); + let col_path = self.dir().join("server.anki2"); + let col = CollectionBuilder::new(col_path) + .set_server(true) + .build() + .unwrap(); Box::new(LocalServer::new(col)) } } diff --git a/rslib/src/sync/server.rs b/rslib/src/sync/server.rs index 8d680618c..98ca2c15d 100644 --- a/rslib/src/sync/server.rs +++ b/rslib/src/sync/server.rs @@ -8,7 +8,7 @@ use tempfile::NamedTempFile; use super::ChunkableIds; use crate::{ - collection::open_and_check_collection, + collection::CollectionBuilder, prelude::*, storage::open_and_check_sqlite_file, sync::{ @@ -200,7 +200,7 @@ impl SyncServer for LocalServer { // ensure it's a valid sqlite file, and a valid collection open_and_check_sqlite_file(col_path) - .and_then(|_| open_and_check_collection(col_path)) + .and_then(|_| CollectionBuilder::new(col_path).build()) .map_err(|check_err| match fs::remove_file(col_path) { Ok(_) => check_err, Err(remove_err) => remove_err.into(),