diff --git a/rslib/build.rs b/rslib/build.rs
index 41e768a09..9722afc41 100644
--- a/rslib/build.rs
+++ b/rslib/build.rs
@@ -104,7 +104,7 @@ pub enum BackendMethod {
 "#,
     );
     for (idx, method) in service.methods.iter().enumerate() {
-        write!(buf, "    {} = {},\n", method.proto_name, idx + 1).unwrap();
+        writeln!(buf, "    {} = {},", method.proto_name, idx + 1).unwrap();
     }
     buf.push_str("}\n\n");
 }
diff --git a/rslib/src/findreplace.rs b/rslib/src/findreplace.rs
index 32368cc19..43ee636d4 100644
--- a/rslib/src/findreplace.rs
+++ b/rslib/src/findreplace.rs
@@ -137,7 +137,7 @@ mod test {
                 "Text".into()
             ]
         );
-        let cnt = col.find_and_replace(nids.clone(), "BBB", "ccc", Some("Front".into()))?;
+        let cnt = col.find_and_replace(nids, "BBB", "ccc", Some("Front".into()))?;
         // still 2, as the caller is expected to provide only note ids that have
         // that field, and if we can't find the field we fall back on all fields
         assert_eq!(cnt, 2);
diff --git a/rslib/src/media/changetracker.rs b/rslib/src/media/changetracker.rs
index a68d9e4d7..9dac17115 100644
--- a/rslib/src/media/changetracker.rs
+++ b/rslib/src/media/changetracker.rs
@@ -297,7 +297,7 @@ mod test {
             entry,
             MediaEntry {
                 fname: "file.jpg".into(),
-                sha1: Some(sha1_of_data("hello".as_bytes())),
+                sha1: Some(sha1_of_data(b"hello")),
                 mtime: f1
                     .metadata()?
                     .modified()?
@@ -331,7 +331,7 @@ mod test {
             ctx.get_entry("file.jpg")?.unwrap(),
             MediaEntry {
                 fname: "file.jpg".into(),
-                sha1: Some(sha1_of_data("hello1".as_bytes())),
+                sha1: Some(sha1_of_data(b"hello1")),
                 mtime: f1
                     .metadata()?
                     .modified()?
diff --git a/rslib/src/media/check.rs b/rslib/src/media/check.rs
index 1c2a487e1..6866c4df5 100644
--- a/rslib/src/media/check.rs
+++ b/rslib/src/media/check.rs
@@ -519,7 +519,7 @@ fn extract_latex_refs(note: &Note, seen_files: &mut HashSet<String>, svg: bool)
 
 #[cfg(test)]
 pub(crate) mod test {
-    pub(crate) const MEDIACHECK_ANKI2: &'static [u8] =
+    pub(crate) const MEDIACHECK_ANKI2: &[u8] =
         include_bytes!("../../tests/support/mediacheck.anki2");
 
     use crate::collection::{open_collection, Collection};
diff --git a/rslib/src/media/database.rs b/rslib/src/media/database.rs
index 1ce500567..03cd8def9 100644
--- a/rslib/src/media/database.rs
+++ b/rslib/src/media/database.rs
@@ -279,7 +279,7 @@ mod test {
         assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
 
         // update it
-        entry.sha1 = Some(sha1_of_data("hello".as_bytes()));
+        entry.sha1 = Some(sha1_of_data(b"hello"));
         entry.mtime = 123;
         entry.sync_required = true;
         ctx.set_entry(&entry)?;
diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs
index 563071537..763cdb47c 100644
--- a/rslib/src/media/files.rs
+++ b/rslib/src/media/files.rs
@@ -442,7 +442,7 @@ mod test {
     #[test]
     fn add_hash_suffix() {
-        let hash = sha1_of_data("hello".as_bytes());
+        let hash = sha1_of_data(b"hello");
         assert_eq!(
             add_hash_suffix_to_file_stem("test.jpg", &hash).as_str(),
             "test-aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d.jpg"
         );
@@ -455,22 +455,22 @@ mod test {
         let dpath = dir.path();
 
         // no existing file case
-        let h1 = sha1_of_data("hello".as_bytes());
+        let h1 = sha1_of_data(b"hello");
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes(), h1).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", b"hello", h1).unwrap(),
             "test.mp3"
         );
 
         // same contents case
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello".as_bytes(), h1).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", b"hello", h1).unwrap(),
             "test.mp3"
         );
 
         // different contents
-        let h2 = sha1_of_data("hello1".as_bytes());
+        let h2 = sha1_of_data(b"hello1");
         assert_eq!(
-            add_data_to_folder_uniquely(dpath, "test.mp3", "hello1".as_bytes(), h2).unwrap(),
+            add_data_to_folder_uniquely(dpath, "test.mp3", b"hello1", h2).unwrap(),
             "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"
         );
 
diff --git a/rslib/src/sched/cutoff.rs b/rslib/src/sched/cutoff.rs
index 8483eabb1..5938984d5 100644
--- a/rslib/src/sched/cutoff.rs
+++ b/rslib/src/sched/cutoff.rs
@@ -378,16 +378,16 @@ mod test {
     fn legacy_creation_stamp() {
         let offset = fixed_offset_from_minutes(AEST_MINS_WEST);
 
-        let now = TimestampSecs(offset.ymd(2020, 05, 10).and_hms(9, 30, 30).timestamp());
+        let now = TimestampSecs(offset.ymd(2020, 5, 10).and_hms(9, 30, 30).timestamp());
         assert_eq!(
             v1_creation_date_inner(now, AEST_MINS_WEST),
-            offset.ymd(2020, 05, 10).and_hms(4, 0, 0).timestamp()
+            offset.ymd(2020, 5, 10).and_hms(4, 0, 0).timestamp()
         );
 
-        let now = TimestampSecs(offset.ymd(2020, 05, 10).and_hms(1, 30, 30).timestamp());
+        let now = TimestampSecs(offset.ymd(2020, 5, 10).and_hms(1, 30, 30).timestamp());
         assert_eq!(
             v1_creation_date_inner(now, AEST_MINS_WEST),
-            offset.ymd(2020, 05, 9).and_hms(4, 0, 0).timestamp()
+            offset.ymd(2020, 5, 9).and_hms(4, 0, 0).timestamp()
         );
 
         let crt = v1_creation_date_inner(now, AEST_MINS_WEST);
diff --git a/rslib/src/sync/mod.rs b/rslib/src/sync/mod.rs
index cfe31a802..b2933e22d 100644
--- a/rslib/src/sync/mod.rs
+++ b/rslib/src/sync/mod.rs
@@ -1194,14 +1194,7 @@ mod test {
     fn open_col(ctx: &TestContext, fname: &str) -> Result<Collection> {
         let path = ctx.dir.path().join(fname);
         let i18n = I18n::new(&[""], "", log::terminal());
-        open_collection(
-            path,
-            "".into(),
-            "".into(),
-            false,
-            i18n.clone(),
-            log::terminal(),
-        )
+        open_collection(path, "".into(), "".into(), false, i18n, log::terminal())
     }
 
     async fn upload_download(ctx: &mut TestContext) -> Result<()> {
diff --git a/rslib/src/template.rs b/rslib/src/template.rs
index f1b7c233f..34663d650 100644
--- a/rslib/src/template.rs
+++ b/rslib/src/template.rs
@@ -1141,7 +1141,7 @@ mod test {
         if let FN::Text { ref text } = qnodes[1] {
            assert!(text.contains("card is blank"));
        } else {
-            assert!(false);
+            unreachable!();
        }
    }
 }
diff --git a/rspy/src/lib.rs b/rspy/src/lib.rs
index 50261cbd7..bda030380 100644
--- a/rspy/src/lib.rs
+++ b/rspy/src/lib.rs
@@ -139,7 +139,7 @@ impl Backend {
                 let out_obj = PyBytes::new(py, &out_bytes);
                 out_obj.into()
             })
-            .map_err(|err_bytes| BackendError::py_err(err_bytes))
+            .map_err(BackendError::py_err)
     }
 
     /// This takes and returns JSON, due to Python's slow protobuf
@@ -149,7 +149,7 @@ impl Backend {
         let out_res = py.allow_threads(move || {
             self.backend
                 .run_db_command_bytes(in_bytes)
-                .map_err(|err_bytes| BackendError::py_err(err_bytes))
+                .map_err(BackendError::py_err)
         });
         let out_bytes = out_res?;
         let out_obj = PyBytes::new(py, &out_bytes);