lazy_static → once_cell → stabilized versions (#3447)

* Anki: Replace lazy_static with once_cell

Unify to once_cell, lazy_static's replacement. The latter is unmaintained.

* Anki: Replace once_cell with stabilized LazyCell / LazyLock as far as possible

Since 1.80: https://github.com/rust-lang/rust/issues/109736 and https://github.com/rust-lang/rust/pull/98165

Non-Thread-Safe Lazy → std::cell::LazyCell https://doc.rust-lang.org/nightly/std/cell/struct.LazyCell.html

Thread-safe SyncLazy → std::sync::LazyLock https://doc.rust-lang.org/nightly/std/sync/struct.LazyLock.html

The compiler accepted LazyCell only in minilints.

The final use in rslib/src/log.rs couldn't be replaced since get_or_try_init has not yet been stabilized: https://github.com/rust-lang/rust/issues/109737

* Declare correct MSRV (dae)

Some of our deps require newer Rust versions, so this was misleading.

Updating the MSRV also allows us to use .inspect() on Option now
This commit is contained in:
a.r 2024-09-30 15:35:06 +02:00 committed by GitHub
parent e2124cd790
commit d9969a9f4f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
39 changed files with 200 additions and 220 deletions

6
Cargo.lock generated
View file

@ -134,7 +134,6 @@ dependencies = [
"id_tree", "id_tree",
"inflections", "inflections",
"itertools 0.13.0", "itertools 0.13.0",
"lazy_static",
"nom", "nom",
"num_cpus", "num_cpus",
"num_enum", "num_enum",
@ -249,7 +248,6 @@ dependencies = [
"camino", "camino",
"inflections", "inflections",
"itertools 0.13.0", "itertools 0.13.0",
"once_cell",
"prost-reflect", "prost-reflect",
"prost-types", "prost-types",
"regex", "regex",
@ -1891,7 +1889,6 @@ dependencies = [
"clap", "clap",
"fluent-syntax", "fluent-syntax",
"itertools 0.13.0", "itertools 0.13.0",
"lazy_static",
"regex", "regex",
"serde_json", "serde_json",
"snafu", "snafu",
@ -3167,7 +3164,6 @@ dependencies = [
"anki", "anki",
"futures", "futures",
"itertools 0.13.0", "itertools 0.13.0",
"lazy_static",
"linkcheck", "linkcheck",
"regex", "regex",
"reqwest 0.12.7", "reqwest 0.12.7",
@ -3445,7 +3441,6 @@ dependencies = [
"anki_process", "anki_process",
"anyhow", "anyhow",
"camino", "camino",
"once_cell",
"walkdir", "walkdir",
"which", "which",
] ]
@ -3609,7 +3604,6 @@ dependencies = [
"dunce", "dunce",
"globset", "globset",
"itertools 0.13.0", "itertools 0.13.0",
"lazy_static",
"maplit", "maplit",
"num_cpus", "num_cpus",
"walkdir", "walkdir",

View file

@ -3,7 +3,7 @@ version = "0.0.0"
authors = ["Ankitects Pty Ltd and contributors <https://help.ankiweb.net>"] authors = ["Ankitects Pty Ltd and contributors <https://help.ankiweb.net>"]
edition = "2021" edition = "2021"
license = "AGPL-3.0-or-later" license = "AGPL-3.0-or-later"
rust-version = "1.65" rust-version = "1.80"
[workspace] [workspace]
members = [ members = [

View file

@ -14,7 +14,6 @@ camino.workspace = true
dunce.workspace = true dunce.workspace = true
globset.workspace = true globset.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
maplit.workspace = true maplit.workspace = true
num_cpus.workspace = true num_cpus.workspace = true
walkdir.workspace = true walkdir.workspace = true

View file

@ -3,6 +3,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt::Display; use std::fmt::Display;
use std::sync::LazyLock;
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
@ -118,9 +119,7 @@ pub struct Glob {
pub exclude: Option<String>, pub exclude: Option<String>,
} }
lazy_static::lazy_static! { static CACHED_FILES: LazyLock<Vec<Utf8PathBuf>> = LazyLock::new(cache_files);
static ref CACHED_FILES: Vec<Utf8PathBuf> = cache_files();
}
/// Walking the source tree once instead of for each glob yields ~4x speed /// Walking the source tree once instead of for each glob yields ~4x speed
/// improvements. /// improvements.

View file

@ -16,7 +16,6 @@ camino.workspace = true
clap.workspace = true clap.workspace = true
fluent-syntax.workspace = true fluent-syntax.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
regex.workspace = true regex.workspace = true
serde_json.workspace = true serde_json.workspace = true
snafu.workspace = true snafu.workspace = true

View file

@ -6,6 +6,7 @@ use std::fs;
use std::io::BufReader; use std::io::BufReader;
use std::iter::FromIterator; use std::iter::FromIterator;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::LazyLock;
use anki_io::create_file; use anki_io::create_file;
use anyhow::Context; use anyhow::Context;
@ -14,7 +15,6 @@ use clap::Args;
use fluent_syntax::ast; use fluent_syntax::ast;
use fluent_syntax::ast::Resource; use fluent_syntax::ast::Resource;
use fluent_syntax::parser; use fluent_syntax::parser;
use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use walkdir::DirEntry; use walkdir::DirEntry;
use walkdir::WalkDir; use walkdir::WalkDir;
@ -144,9 +144,8 @@ fn extract_nested_messages_and_terms(
ftl_roots: &[impl AsRef<str>], ftl_roots: &[impl AsRef<str>],
used_ftls: &mut HashSet<String>, used_ftls: &mut HashSet<String>,
) { ) {
lazy_static! { static REFERENCE: LazyLock<Regex> =
static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap(); LazyLock::new(|| Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap());
}
for_files_with_ending(ftl_roots, ".ftl", |entry| { for_files_with_ending(ftl_roots, ".ftl", |entry| {
let source = fs::read_to_string(entry.path()).expect("file not readable"); let source = fs::read_to_string(entry.path()).expect("file not readable");
for caps in REFERENCE.captures_iter(&source) { for caps in REFERENCE.captures_iter(&source) {
@ -198,11 +197,12 @@ fn entry_use_check(used_ftls: &HashSet<String>) -> impl Fn(&ast::Entry<&str>) ->
} }
fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) { fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
lazy_static! { static SNAKECASE_TR: LazyLock<Regex> =
static ref SNAKECASE_TR: Regex = Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap(); LazyLock::new(|| Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap());
static ref CAMELCASE_TR: Regex = Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap(); static CAMELCASE_TR: LazyLock<Regex> =
static ref DESIGNER_STYLE_TR: Regex = Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap(); LazyLock::new(|| Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap());
} static DESIGNER_STYLE_TR: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap());
let file_name = entry.file_name().to_str().expect("non-unicode filename"); let file_name = entry.file_name().to_str().expect("non-unicode filename");

View file

@ -69,7 +69,6 @@ htmlescape.workspace = true
hyper.workspace = true hyper.workspace = true
id_tree.workspace = true id_tree.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
nom.workspace = true nom.workspace = true
num_cpus.workspace = true num_cpus.workspace = true
num_enum.workspace = true num_enum.workspace = true

View file

@ -11,7 +11,6 @@ rust-version.workspace = true
anki.workspace = true anki.workspace = true
futures.workspace = true futures.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
linkcheck.workspace = true linkcheck.workspace = true
regex.workspace = true regex.workspace = true
reqwest.workspace = true reqwest.workspace = true

View file

@ -6,6 +6,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::env; use std::env;
use std::iter; use std::iter;
use std::sync::LazyLock;
use std::time::Duration; use std::time::Duration;
use anki::links::help_page_link_suffix; use anki::links::help_page_link_suffix;
@ -13,7 +14,6 @@ use anki::links::help_page_to_link;
use anki::links::HelpPage; use anki::links::HelpPage;
use futures::StreamExt; use futures::StreamExt;
use itertools::Itertools; use itertools::Itertools;
use lazy_static::lazy_static;
use linkcheck::validation::check_web; use linkcheck::validation::check_web;
use linkcheck::validation::Context; use linkcheck::validation::Context;
use linkcheck::validation::Reason; use linkcheck::validation::Reason;
@ -70,9 +70,8 @@ impl From<&'static str> for CheckableUrl {
} }
fn ts_help_pages() -> impl Iterator<Item = &'static str> { fn ts_help_pages() -> impl Iterator<Item = &'static str> {
lazy_static! { static QUOTED_URL: LazyLock<Regex> = LazyLock::new(|| Regex::new("\"(http.+)\"").unwrap());
static ref QUOTED_URL: Regex = Regex::new("\"(http.+)\"").unwrap();
}
QUOTED_URL QUOTED_URL
.captures_iter(include_str!("../../../ts/lib/tslib/help-page.ts")) .captures_iter(include_str!("../../../ts/lib/tslib/help-page.ts"))
.map(|caps| caps.get(1).unwrap().as_str()) .map(|caps| caps.get(1).unwrap().as_str())

View file

@ -15,7 +15,6 @@ anyhow.workspace = true
camino.workspace = true camino.workspace = true
inflections.workspace = true inflections.workspace = true
itertools.workspace = true itertools.workspace = true
once_cell.workspace = true
prost-reflect.workspace = true prost-reflect.workspace = true
prost-types.workspace = true prost-types.workspace = true
regex.workspace = true regex.workspace = true

View file

@ -7,6 +7,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::LazyLock;
use anki_io::read_to_string; use anki_io::read_to_string;
use anki_io::write_file_if_changed; use anki_io::write_file_if_changed;
@ -16,7 +17,6 @@ use camino::Utf8Path;
use inflections::Inflect; use inflections::Inflect;
use itertools::Either; use itertools::Either;
use itertools::Itertools; use itertools::Itertools;
use once_cell::sync::Lazy;
use prost_reflect::DescriptorPool; use prost_reflect::DescriptorPool;
use prost_reflect::MessageDescriptor; use prost_reflect::MessageDescriptor;
use prost_reflect::MethodDescriptor; use prost_reflect::MethodDescriptor;
@ -238,8 +238,8 @@ pub fn add_must_use_annotations_to_file<E>(path: &Utf8Path, is_empty: E) -> Resu
where where
E: Fn(&Utf8Path, &str) -> bool, E: Fn(&Utf8Path, &str) -> bool,
{ {
static MESSAGE_OR_ENUM_RE: Lazy<Regex> = static MESSAGE_OR_ENUM_RE: LazyLock<Regex> =
Lazy::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap()); LazyLock::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap());
let contents = read_to_string(path)?; let contents = read_to_string(path)?;
let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| { let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| {
let is_enum = caps.get(1).unwrap().as_str() == "enum"; let is_enum = caps.get(1).unwrap().as_str() == "enum";

View file

@ -5,6 +5,7 @@ use std::collections::HashMap;
use std::mem::size_of; use std::mem::size_of;
use std::sync::atomic::AtomicI32; use std::sync::atomic::AtomicI32;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
use std::sync::LazyLock;
use std::sync::Mutex; use std::sync::Mutex;
use anki_proto::ankidroid::sql_value::Data; use anki_proto::ankidroid::sql_value::Data;
@ -16,7 +17,6 @@ use itertools::FoldWhile;
use itertools::FoldWhile::Continue; use itertools::FoldWhile::Continue;
use itertools::FoldWhile::Done; use itertools::FoldWhile::Done;
use itertools::Itertools; use itertools::Itertools;
use lazy_static::lazy_static;
use rusqlite::ToSql; use rusqlite::ToSql;
use serde::Deserialize; use serde::Deserialize;
@ -110,10 +110,8 @@ fn select_slice_of_size<'a>(
type SequenceNumber = i32; type SequenceNumber = i32;
lazy_static! { static HASHMAP: LazyLock<Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>>> =
static ref HASHMAP: Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>> = LazyLock::new(|| Mutex::new(HashMap::new()));
Mutex::new(HashMap::new());
}
pub(crate) fn flush_single_result(col: &Collection, sequence_number: i32) { pub(crate) fn flush_single_result(col: &Collection, sequence_number: i32) {
HASHMAP HASHMAP
@ -244,10 +242,9 @@ pub(crate) fn next_sequence_number() -> i32 {
SEQUENCE_NUMBER.fetch_add(1, Ordering::SeqCst) SEQUENCE_NUMBER.fetch_add(1, Ordering::SeqCst)
} }
lazy_static! { // same as we get from
// same as we get from io.requery.android.database.CursorWindow.sCursorWindowSize // io.requery.android.database.CursorWindow.sCursorWindowSize
static ref DB_COMMAND_PAGE_SIZE: Mutex<usize> = Mutex::new(1024 * 1024 * 2); static DB_COMMAND_PAGE_SIZE: LazyLock<Mutex<usize>> = LazyLock::new(|| Mutex::new(1024 * 1024 * 2));
}
pub(crate) fn set_max_page_size(size: usize) { pub(crate) fn set_max_page_size(size: usize) {
let mut state = DB_COMMAND_PAGE_SIZE.lock().expect("Could not lock mutex"); let mut state = DB_COMMAND_PAGE_SIZE.lock().expect("Could not lock mutex");

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use lazy_static::lazy_static; use std::sync::LazyLock;
use regex::Regex; use regex::Regex;
use reqwest::Client; use reqwest::Client;
use serde; use serde;
@ -31,11 +32,9 @@ pub async fn ankihub_login<S: Into<String>>(
client: Client, client: Client,
) -> Result<LoginResponse> { ) -> Result<LoginResponse> {
let client = HttpAnkiHubClient::new("", client); let client = HttpAnkiHubClient::new("", client);
lazy_static! { static EMAIL_RE: LazyLock<Regex> = LazyLock::new(|| {
static ref EMAIL_RE: Regex = Regex::new(r"^[a-zA-Z0-9.!#$%&*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap()
Regex::new(r"^[a-zA-Z0-9.!#$%&*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$") });
.unwrap();
}
let mut request = LoginRequest { let mut request = LoginRequest {
username: None, username: None,
email: None, email: None,

View file

@ -19,10 +19,10 @@ use std::ops::Deref;
use std::result; use std::result;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use std::sync::OnceLock;
use std::thread::JoinHandle; use std::thread::JoinHandle;
use futures::future::AbortHandle; use futures::future::AbortHandle;
use once_cell::sync::OnceCell;
use prost::Message; use prost::Message;
use reqwest::Client; use reqwest::Client;
use tokio::runtime; use tokio::runtime;
@ -53,7 +53,7 @@ pub struct BackendInner {
server: bool, server: bool,
sync_abort: Mutex<Option<AbortHandle>>, sync_abort: Mutex<Option<AbortHandle>>,
progress_state: Arc<Mutex<ProgressState>>, progress_state: Arc<Mutex<ProgressState>>,
runtime: OnceCell<Runtime>, runtime: OnceLock<Runtime>,
state: Mutex<BackendState>, state: Mutex<BackendState>,
backup_task: Mutex<Option<JoinHandle<Result<()>>>>, backup_task: Mutex<Option<JoinHandle<Result<()>>>>,
media_sync_task: Mutex<Option<JoinHandle<Result<()>>>>, media_sync_task: Mutex<Option<JoinHandle<Result<()>>>>,
@ -88,7 +88,7 @@ impl Backend {
want_abort: false, want_abort: false,
last_progress: None, last_progress: None,
})), })),
runtime: OnceCell::new(), runtime: OnceLock::new(),
state: Mutex::new(BackendState::default()), state: Mutex::new(BackendState::default()),
backup_task: Mutex::new(None), backup_task: Mutex::new(None),
media_sync_task: Mutex::new(None), media_sync_task: Mutex::new(None),

View file

@ -5,11 +5,11 @@ use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fmt::Write; use std::fmt::Write;
use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute; use htmlescape::encode_attribute;
use lazy_static::lazy_static;
use nom::branch::alt; use nom::branch::alt;
use nom::bytes::complete::tag; use nom::bytes::complete::tag;
use nom::bytes::complete::take_while; use nom::bytes::complete::take_while;
@ -24,16 +24,16 @@ use crate::latex::contains_latex;
use crate::template::RenderContext; use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities; use crate::text::strip_html_preserving_entities;
lazy_static! { static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
static ref MATHJAX: Regex = Regex::new( Regex::new(
r"(?xsi) r"(?xsi)
(\\[(\[]) # 1 = mathjax opening tag (\\[(\[]) # 1 = mathjax opening tag
(.*?) # 2 = inner content (.*?) # 2 = inner content
(\\[])]) # 3 = mathjax closing tag (\\[])]) # 3 = mathjax closing tag
" ",
) )
.unwrap(); .unwrap()
} });
mod mathjax_caps { mod mathjax_caps {
pub const OPENING_TAG: usize = 1; pub const OPENING_TAG: usize = 1;

View file

@ -6,10 +6,10 @@ use std::collections::HashMap;
use std::fs::File; use std::fs::File;
use std::io::Write; use std::io::Write;
use std::sync::Arc; use std::sync::Arc;
use std::sync::LazyLock;
use anki_proto::import_export::ExportNoteCsvRequest; use anki_proto::import_export::ExportNoteCsvRequest;
use itertools::Itertools; use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use super::metadata::Delimiter; use super::metadata::Delimiter;
@ -156,23 +156,22 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
} }
fn strip_redundant_sections(text: &str) -> Cow<str> { fn strip_redundant_sections(text: &str) -> Cow<str> {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r"(?isx) r"(?isx)
<style>.*?</style> # style elements <style>.*?</style> # style elements
| |
\[\[type:[^]]+\]\] # type replacements \[\[type:[^]]+\]\] # type replacements
" ",
) )
.unwrap(); .unwrap()
} });
RE.replace_all(text.as_ref(), "") RE.replace_all(text.as_ref(), "")
} }
fn strip_answer_side_question(text: &str) -> Cow<str> { fn strip_answer_side_question(text: &str) -> Cow<str> {
lazy_static! { static RE: LazyLock<Regex> =
static ref RE: Regex = Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap(); LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
}
RE.replace_all(text.as_ref(), "") RE.replace_all(text.as_ref(), "")
} }

View file

@ -2,8 +2,8 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::borrow::Cow; use std::borrow::Cow;
use std::sync::LazyLock;
use lazy_static::lazy_static;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
@ -11,26 +11,28 @@ use crate::cloze::expand_clozes_to_reveal_latex;
use crate::media::files::sha1_of_data; use crate::media::files::sha1_of_data;
use crate::text::strip_html; use crate::text::strip_html;
lazy_static! { pub(crate) static LATEX: LazyLock<Regex> = LazyLock::new(|| {
pub(crate) static ref LATEX: Regex = Regex::new( Regex::new(
r"(?xsi) r"(?xsi)
\[latex\](.+?)\[/latex\] # 1 - standard latex \[latex\](.+?)\[/latex\] # 1 - standard latex
| |
\[\$\](.+?)\[/\$\] # 2 - inline math \[\$\](.+?)\[/\$\] # 2 - inline math
| |
\[\$\$\](.+?)\[/\$\$\] # 3 - math environment \[\$\$\](.+?)\[/\$\$\] # 3 - math environment
" ",
) )
.unwrap(); .unwrap()
static ref LATEX_NEWLINES: Regex = Regex::new( });
static LATEX_NEWLINES: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?xi) r#"(?xi)
<br( /)?> <br( /)?>
| |
<div> <div>
"# "#,
) )
.unwrap(); .unwrap()
} });
pub(crate) fn contains_latex(text: &str) -> bool { pub(crate) fn contains_latex(text: &str) -> bool {
LATEX.is_match(text) LATEX.is_match(text)

View file

@ -52,9 +52,7 @@ pub mod undo;
pub mod version; pub mod version;
use std::env; use std::env;
use std::sync::LazyLock;
use lazy_static::lazy_static; pub(crate) static PYTHON_UNIT_TESTS: LazyLock<bool> =
LazyLock::new(|| env::var("ANKI_TEST_MODE").is_ok());
lazy_static! {
pub(crate) static ref PYTHON_UNIT_TESTS: bool = env::var("ANKI_TEST_MODE").is_ok();
}

View file

@ -6,11 +6,11 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fs; use std::fs;
use std::io; use std::io;
use std::sync::LazyLock;
use anki_i18n::without_unicode_isolation; use anki_i18n::without_unicode_isolation;
use anki_io::write_file; use anki_io::write_file;
use data_encoding::BASE64; use data_encoding::BASE64;
use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use tracing::debug; use tracing::debug;
use tracing::info; use tracing::info;
@ -459,7 +459,7 @@ impl MediaChecker<'_> {
} }
fn maybe_extract_inline_image<'a>(&mut self, fname_decoded: &'a str) -> Result<Cow<'a, str>> { fn maybe_extract_inline_image<'a>(&mut self, fname_decoded: &'a str) -> Result<Cow<'a, str>> {
static BASE64_IMG: Lazy<Regex> = Lazy::new(|| { static BASE64_IMG: LazyLock<Regex> = LazyLock::new(|| {
Regex::new("(?i)^data:image/(jpg|jpeg|png|gif|webp|avif);base64,(.+)$").unwrap() Regex::new("(?i)^data:image/(jpg|jpeg|png|gif|webp|avif);base64,(.+)$").unwrap()
}); });

View file

@ -7,6 +7,7 @@ use std::io;
use std::io::Read; use std::io::Read;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::LazyLock;
use std::time; use std::time;
use anki_io::create_dir; use anki_io::create_dir;
@ -15,7 +16,6 @@ use anki_io::write_file;
use anki_io::FileIoError; use anki_io::FileIoError;
use anki_io::FileIoSnafu; use anki_io::FileIoSnafu;
use anki_io::FileOp; use anki_io::FileOp;
use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use sha1::Digest; use sha1::Digest;
use sha1::Sha1; use sha1::Sha1;
@ -27,8 +27,8 @@ use unicode_normalization::UnicodeNormalization;
use crate::prelude::*; use crate::prelude::*;
use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH; use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH;
lazy_static! { static WINDOWS_DEVICE_NAME: LazyLock<Regex> = LazyLock::new(|| {
static ref WINDOWS_DEVICE_NAME: Regex = Regex::new( Regex::new(
r"(?xi) r"(?xi)
# starting with one of the following names # starting with one of the following names
^ ^
@ -39,30 +39,34 @@ lazy_static! {
( (
\. | $ \. | $
) )
" ",
) )
.unwrap(); .unwrap()
static ref WINDOWS_TRAILING_CHAR: Regex = Regex::new( });
static WINDOWS_TRAILING_CHAR: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?x) r"(?x)
# filenames can't end with a space or period # filenames can't end with a space or period
( (
\x20 | \. \x20 | \.
) )
$ $
" ",
) )
.unwrap(); .unwrap()
pub(crate) static ref NONSYNCABLE_FILENAME: Regex = Regex::new( });
pub(crate) static NONSYNCABLE_FILENAME: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?xi) r#"(?xi)
^ ^
(:? (:?
thumbs.db | .ds_store thumbs.db | .ds_store
) )
$ $
"# "#,
) )
.unwrap(); .unwrap()
} });
/// True if character may cause problems on one or more platforms. /// True if character may cause problems on one or more platforms.
fn disallowed_char(char: char) -> bool { fn disallowed_char(char: char) -> bool {

View file

@ -4,9 +4,9 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt::Write; use std::fmt::Write;
use std::ops::Deref; use std::ops::Deref;
use std::sync::LazyLock;
use anki_i18n::without_unicode_isolation; use anki_i18n::without_unicode_isolation;
use lazy_static::lazy_static;
use regex::Captures; use regex::Captures;
use regex::Match; use regex::Match;
use regex::Regex; use regex::Regex;
@ -24,9 +24,7 @@ struct Template<'a> {
front: bool, front: bool,
} }
lazy_static! { static FIELD_REPLACEMENT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\{\{.+\}\}").unwrap());
static ref FIELD_REPLACEMENT: Regex = Regex::new(r"\{\{.+\}\}").unwrap();
}
impl Collection { impl Collection {
pub fn report_media_field_referencing_templates(&mut self, buf: &mut String) -> Result<()> { pub fn report_media_field_referencing_templates(&mut self, buf: &mut String) -> Result<()> {

View file

@ -20,6 +20,7 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::iter::FromIterator; use std::iter::FromIterator;
use std::sync::Arc; use std::sync::Arc;
use std::sync::LazyLock;
pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind; pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind;
pub use anki_proto::notetypes::notetype::config::CardRequirement; pub use anki_proto::notetypes::notetype::config::CardRequirement;
@ -33,7 +34,6 @@ pub use anki_proto::notetypes::Notetype as NotetypeProto;
pub(crate) use cardgen::AlreadyGeneratedCardInfo; pub(crate) use cardgen::AlreadyGeneratedCardInfo;
pub(crate) use cardgen::CardGenContext; pub(crate) use cardgen::CardGenContext;
pub use fields::NoteField; pub use fields::NoteField;
use lazy_static::lazy_static;
pub use notetypechange::ChangeNotetypeInput; pub use notetypechange::ChangeNotetypeInput;
pub use notetypechange::NotetypeChangeInfo; pub use notetypechange::NotetypeChangeInfo;
use regex::Regex; use regex::Regex;
@ -67,9 +67,9 @@ pub(crate) const DEFAULT_CSS: &str = include_str!("styling.css");
pub(crate) const DEFAULT_CLOZE_CSS: &str = include_str!("cloze_styling.css"); pub(crate) const DEFAULT_CLOZE_CSS: &str = include_str!("cloze_styling.css");
pub(crate) const DEFAULT_LATEX_HEADER: &str = include_str!("header.tex"); pub(crate) const DEFAULT_LATEX_HEADER: &str = include_str!("header.tex");
pub(crate) const DEFAULT_LATEX_FOOTER: &str = r"\end{document}"; pub(crate) const DEFAULT_LATEX_FOOTER: &str = r"\end{document}";
lazy_static! {
/// New entries must be handled in render.rs/add_special_fields(). /// New entries must be handled in render.rs/add_special_fields().
static ref SPECIAL_FIELDS: HashSet<&'static str> = HashSet::from_iter(vec![ static SPECIAL_FIELDS: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
HashSet::from_iter(vec![
"FrontSide", "FrontSide",
"Card", "Card",
"CardFlag", "CardFlag",
@ -77,8 +77,8 @@ lazy_static! {
"Subdeck", "Subdeck",
"Tags", "Tags",
"Type", "Type",
]); ])
} });
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Notetype { pub struct Notetype {
@ -365,9 +365,8 @@ impl Notetype {
} }
fn ensure_template_fronts_unique(&self) -> Result<(), CardTypeError> { fn ensure_template_fronts_unique(&self) -> Result<(), CardTypeError> {
lazy_static! { static CARD_TAG: LazyLock<Regex> =
static ref CARD_TAG: Regex = Regex::new(r"\{\{\s*Card\s*\}\}").unwrap(); LazyLock::new(|| Regex::new(r"\{\{\s*Card\s*\}\}").unwrap());
}
let mut map = HashMap::new(); let mut map = HashMap::new();
for (index, card) in self.templates.iter().enumerate() { for (index, card) in self.templates.iter().enumerate() {

View file

@ -118,12 +118,11 @@ impl CardQueues {
/// Remove the head of the intraday learning queue, and update counts. /// Remove the head of the intraday learning queue, and update counts.
pub(super) fn pop_intraday_learning(&mut self) -> Option<LearningQueueEntry> { pub(super) fn pop_intraday_learning(&mut self) -> Option<LearningQueueEntry> {
self.intraday_learning.pop_front().map(|head| { self.intraday_learning.pop_front().inspect(|_head| {
// FIXME: // FIXME:
// under normal circumstances this should not go below 0, but currently // under normal circumstances this should not go below 0, but currently
// the Python unit tests answer learning cards before they're due // the Python unit tests answer learning cards before they're due
self.counts.learning = self.counts.learning.saturating_sub(1); self.counts.learning = self.counts.learning.saturating_sub(1);
head
}) })
} }

View file

@ -21,7 +21,7 @@ pub(crate) enum MainQueueEntryKind {
impl CardQueues { impl CardQueues {
/// Remove the head of the main queue, and update counts. /// Remove the head of the main queue, and update counts.
pub(super) fn pop_main(&mut self) -> Option<MainQueueEntry> { pub(super) fn pop_main(&mut self) -> Option<MainQueueEntry> {
self.main.pop_front().map(|head| { self.main.pop_front().inspect(|head| {
match head.kind { match head.kind {
MainQueueEntryKind::New => self.counts.new -= 1, MainQueueEntryKind::New => self.counts.new -= 1,
MainQueueEntryKind::Review => self.counts.review -= 1, MainQueueEntryKind::Review => self.counts.review -= 1,
@ -32,7 +32,6 @@ impl CardQueues {
self.counts.learning = self.counts.learning.saturating_sub(1) self.counts.learning = self.counts.learning.saturating_sub(1)
} }
}; };
head
}) })
} }

View file

@ -2,8 +2,8 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::LazyLock;
use lazy_static::lazy_static;
use rand::distributions::Distribution; use rand::distributions::Distribution;
use rand::distributions::Uniform; use rand::distributions::Uniform;
use regex::Regex; use regex::Regex;
@ -65,8 +65,8 @@ pub struct DueDateSpecifier {
} }
pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> { pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r"(?x)^ r"(?x)^
# a number # a number
(?P<min>\d+) (?P<min>\d+)
@ -78,10 +78,10 @@ pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
# optional exclamation mark # optional exclamation mark
(?P<bang>!)? (?P<bang>!)?
$ $
" ",
) )
.unwrap(); .unwrap()
} });
let caps = RE.captures(s).or_invalid(s)?; let caps = RE.captures(s).or_invalid(s)?;
let min: u32 = caps.name("min").unwrap().as_str().parse()?; let min: u32 = caps.name("min").unwrap().as_str().parse()?;
let max = if let Some(max) = caps.name("max") { let max = if let Some(max) = caps.name("max") {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use lazy_static::lazy_static; use std::sync::LazyLock;
use nom::branch::alt; use nom::branch::alt;
use nom::bytes::complete::escaped; use nom::bytes::complete::escaped;
use nom::bytes::complete::is_not; use nom::bytes::complete::is_not;
@ -621,9 +622,7 @@ fn parse_mid(s: &str) -> ParseResult<SearchNode> {
/// ensure a list of ids contains only numbers and commas, returning unchanged /// ensure a list of ids contains only numbers and commas, returning unchanged
/// if true used by nid: and cid: /// if true used by nid: and cid:
fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> { fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^(\d+,)*\d+$").unwrap());
static ref RE: Regex = Regex::new(r"^(\d+,)*\d+$").unwrap();
}
if RE.is_match(s) { if RE.is_match(s) {
Ok(s) Ok(s)
} else { } else {
@ -700,9 +699,7 @@ fn unescape(txt: &str) -> ParseResult<String> {
)) ))
} else { } else {
Ok(if is_parser_escape(txt) { Ok(if is_parser_escape(txt) {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r#"\\[\\":()-]"#).unwrap());
static ref RE: Regex = Regex::new(r#"\\[\\":()-]"#).unwrap();
}
RE.replace_all(txt, |caps: &Captures| match &caps[0] { RE.replace_all(txt, |caps: &Captures| match &caps[0] {
r"\\" => r"\\", r"\\" => r"\\",
"\\\"" => "\"", "\\\"" => "\"",
@ -722,17 +719,17 @@ fn unescape(txt: &str) -> ParseResult<String> {
/// Return invalid escape sequence if any. /// Return invalid escape sequence if any.
fn invalid_escape_sequence(txt: &str) -> Option<String> { fn invalid_escape_sequence(txt: &str) -> Option<String> {
// odd number of \s not followed by an escapable character // odd number of \s not followed by an escapable character
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r#"(?x) r#"(?x)
(?:^|[^\\]) # not a backslash (?:^|[^\\]) # not a backslash
(?:\\\\)* # even number of backslashes (?:\\\\)* # even number of backslashes
(\\ # single backslash (\\ # single backslash
(?:[^\\":*_()-]|$)) # anything but an escapable char (?:[^\\":*_()-]|$)) # anything but an escapable char
"# "#,
) )
.unwrap(); .unwrap()
} });
let caps = RE.captures(txt)?; let caps = RE.captures(txt)?;
Some(caps[1].to_string()) Some(caps[1].to_string())
@ -741,17 +738,17 @@ fn invalid_escape_sequence(txt: &str) -> Option<String> {
/// Check string for escape sequences handled by the parser: ":()- /// Check string for escape sequences handled by the parser: ":()-
fn is_parser_escape(txt: &str) -> bool { fn is_parser_escape(txt: &str) -> bool {
// odd number of \s followed by a char with special meaning to the parser // odd number of \s followed by a char with special meaning to the parser
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r#"(?x) r#"(?x)
(?:^|[^\\]) # not a backslash (?:^|[^\\]) # not a backslash
(?:\\\\)* # even number of backslashes (?:\\\\)* # even number of backslashes
\\ # single backslash \\ # single backslash
[":()-] # parser escape [":()-] # parser escape
"# "#,
) )
.unwrap(); .unwrap()
} });
RE.is_match(txt) RE.is_match(txt)
} }

View file

@ -2,8 +2,8 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::mem; use std::mem;
use std::sync::LazyLock;
use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use crate::notetype::NotetypeId as NotetypeIdType; use crate::notetype::NotetypeId as NotetypeIdType;
@ -109,9 +109,8 @@ fn maybe_quote(txt: &str) -> String {
/// Checks for the reserved keywords "and" and "or", a prepended hyphen, /// Checks for the reserved keywords "and" and "or", a prepended hyphen,
/// whitespace and brackets. /// whitespace and brackets.
fn needs_quotation(txt: &str) -> bool { fn needs_quotation(txt: &str) -> bool {
lazy_static! { static RE: LazyLock<Regex> =
static ref RE: Regex = Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap(); LazyLock::new(|| Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap());
}
RE.is_match(txt) RE.is_match(txt)
} }

View file

@ -161,10 +161,9 @@ impl SqliteStorage {
.prepare(include_str!("alloc_id.sql"))? .prepare(include_str!("alloc_id.sql"))?
.query_row([TimestampMillis::now()], |r| r.get(0))?; .query_row([TimestampMillis::now()], |r| r.get(0))?;
self.add_or_update_deck_with_existing_id(deck) self.add_or_update_deck_with_existing_id(deck)
.map_err(|err| { .inspect_err(|_err| {
// restore id of 0 // restore id of 0
deck.id.0 = 0; deck.id.0 = 0;
err
}) })
} }

View file

@ -4,9 +4,9 @@
#![cfg(test)] #![cfg(test)]
use std::future::Future; use std::future::Future;
use std::sync::LazyLock;
use axum::http::StatusCode; use axum::http::StatusCode;
use once_cell::sync::Lazy;
use reqwest::Client; use reqwest::Client;
use reqwest::Url; use reqwest::Url;
use serde_json::json; use serde_json::json;
@ -57,7 +57,7 @@ struct TestAuth {
host_key: String, host_key: String,
} }
static AUTH: Lazy<TestAuth> = Lazy::new(|| { static AUTH: LazyLock<TestAuth> = LazyLock::new(|| {
if let Ok(auth) = std::env::var("TEST_AUTH") { if let Ok(auth) = std::env::var("TEST_AUTH") {
let mut auth = auth.split(':'); let mut auth = auth.split(':');
TestAuth { TestAuth {
@ -93,7 +93,7 @@ where
.unwrap(); .unwrap();
tokio::spawn(server_fut.instrument(Span::current())); tokio::spawn(server_fut.instrument(Span::current()));
// when not using ephemeral servers, tests need to be serialized // when not using ephemeral servers, tests need to be serialized
static LOCK: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(())); static LOCK: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
let _lock: MutexGuard<()>; let _lock: MutexGuard<()>;
// setup client to connect to it // setup client to connect to it
let endpoint = if let Ok(endpoint) = std::env::var("TEST_ENDPOINT") { let endpoint = if let Ok(endpoint) = std::env::var("TEST_ENDPOINT") {

View file

@ -57,10 +57,9 @@ impl User {
// Returning HTTP 400 will inform the client that a DB check+full sync // Returning HTTP 400 will inform the client that a DB check+full sync
// is required to fix the issue. // is required to fix the issue.
op(col, state) op(col, state)
.map_err(|e| { .inspect_err(|_e| {
self.col = None; self.col = None;
self.sync_state = None; self.sync_state = None;
e
}) })
.or_bad_request("op failed in sync_state") .or_bad_request("op failed in sync_state")
} }

View file

@ -77,9 +77,8 @@ impl ServerMediaDatabase {
self.db.execute("commit", [])?; self.db.execute("commit", [])?;
Ok(meta.last_usn) Ok(meta.last_usn)
}) })
.map_err(|e| { .inspect_err(|_e| {
let _ = self.db.execute("rollback", []); let _ = self.db.execute("rollback", []);
e
}) })
} }

View file

@ -8,6 +8,7 @@ use std::any::Any;
use std::env; use std::env;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::net::IpAddr; use std::net::IpAddr;
use std::sync::LazyLock;
use async_trait::async_trait; use async_trait::async_trait;
use axum::body::Body; use axum::body::Body;
@ -19,7 +20,6 @@ use axum::RequestPartsExt;
use axum_client_ip::SecureClientIp; use axum_client_ip::SecureClientIp;
use axum_extra::TypedHeader; use axum_extra::TypedHeader;
use header_and_stream::SyncHeader; use header_and_stream::SyncHeader;
use once_cell::sync::Lazy;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::Serialize; use serde::Serialize;
use serde_json::Error; use serde_json::Error;
@ -179,7 +179,7 @@ where
} }
} }
pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy<usize> = Lazy::new(|| { pub static MAXIMUM_SYNC_PAYLOAD_BYTES: LazyLock<usize> = LazyLock::new(|| {
env::var("MAX_SYNC_PAYLOAD_MEGS") env::var("MAX_SYNC_PAYLOAD_MEGS")
.map(|v| v.parse().expect("invalid upload limit")) .map(|v| v.parse().expect("invalid upload limit"))
.unwrap_or(100) .unwrap_or(100)
@ -189,5 +189,5 @@ pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy<usize> = Lazy::new(|| {
/// Client ignores this when a non-AnkiWeb endpoint is configured. Controls the /// Client ignores this when a non-AnkiWeb endpoint is configured. Controls the
/// maximum size of a payload after decompression, which effectively limits the /// maximum size of a payload after decompression, which effectively limits the
/// how large a collection file can be uploaded. /// how large a collection file can be uploaded.
pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: Lazy<u64> = pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: LazyLock<u64> =
Lazy::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64); LazyLock::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64);

View file

@ -6,9 +6,9 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::fmt::Write; use std::fmt::Write;
use std::iter; use std::iter;
use std::sync::LazyLock;
use anki_i18n::I18n; use anki_i18n::I18n;
use lazy_static::lazy_static;
use nom::branch::alt; use nom::branch::alt;
use nom::bytes::complete::tag; use nom::bytes::complete::tag;
use nom::bytes::complete::take_until; use nom::bytes::complete::take_until;
@ -546,18 +546,18 @@ fn append_str_to_nodes(nodes: &mut Vec<RenderedNode>, text: &str) {
/// True if provided text contains only whitespace and/or empty BR/DIV tags. /// True if provided text contains only whitespace and/or empty BR/DIV tags.
pub(crate) fn field_is_empty(text: &str) -> bool { pub(crate) fn field_is_empty(text: &str) -> bool {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r"(?xsi) r"(?xsi)
^(?: ^(?:
[[:space:]] [[:space:]]
| |
</?(?:br|div)\ ?/?> </?(?:br|div)\ ?/?>
)*$ )*$
" ",
) )
.unwrap(); .unwrap()
} });
RE.is_match(text) RE.is_match(text)
} }

View file

@ -2,9 +2,9 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::borrow::Cow; use std::borrow::Cow;
use std::sync::LazyLock;
use blake3::Hasher; use blake3::Hasher;
use lazy_static::lazy_static;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
@ -110,9 +110,7 @@ fn apply_filter(
// Ruby filters // Ruby filters
//---------------------------------------- //----------------------------------------
lazy_static! { static FURIGANA: LazyLock<Regex> = LazyLock::new(|| Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap());
static ref FURIGANA: Regex = Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap();
}
/// Did furigana regex match a sound tag? /// Did furigana regex match a sound tag?
fn captured_sound(caps: &Captures) -> bool { fn captured_sound(caps: &Captures) -> bool {

View file

@ -2,8 +2,8 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::borrow::Cow; use std::borrow::Cow;
use std::sync::LazyLock;
use lazy_static::lazy_static;
use percent_encoding_iri::percent_decode_str; use percent_encoding_iri::percent_decode_str;
use percent_encoding_iri::utf8_percent_encode; use percent_encoding_iri::utf8_percent_encode;
use percent_encoding_iri::AsciiSet; use percent_encoding_iri::AsciiSet;
@ -79,17 +79,18 @@ pub enum AvTag {
}, },
} }
lazy_static! { static HTML: LazyLock<Regex> = LazyLock::new(|| {
static ref HTML: Regex = Regex::new(concat!( Regex::new(concat!(
"(?si)", "(?si)",
// wrapped text // wrapped text
r"(<!--.*?-->)|(<style.*?>.*?</style>)|(<script.*?>.*?</script>)", r"(<!--.*?-->)|(<style.*?>.*?</style>)|(<script.*?>.*?</script>)",
// html tags // html tags
r"|(<.*?>)", r"|(<.*?>)",
)) ))
.unwrap(); .unwrap()
});
static ref HTML_LINEBREAK_TAGS: Regex = Regex::new( static HTML_LINEBREAK_TAGS: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?xsi) r#"(?xsi)
</? </?
(?: (?:
@ -99,10 +100,13 @@ lazy_static! {
|output|p|pre|section|table|tfoot|ul|video |output|p|pre|section|table|tfoot|ul|video
) )
> >
"# "#,
).unwrap(); )
.unwrap()
});
pub static ref HTML_MEDIA_TAGS: Regex = Regex::new( pub static HTML_MEDIA_TAGS: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?xsi) r#"(?xsi)
# the start of the image, audio, or object tag # the start of the image, audio, or object tag
<\b(?:img|audio|video|object)\b <\b(?:img|audio|video|object)\b
@ -141,11 +145,14 @@ lazy_static! {
> >
) )
) )
"# "#,
).unwrap(); )
.unwrap()
});
// videos are also in sound tags // videos are also in sound tags
static ref AV_TAGS: Regex = Regex::new( static AV_TAGS: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?xs) r"(?xs)
\[sound:(.+?)\] # 1 - the filename in a sound tag \[sound:(.+?)\] # 1 - the filename in a sound tag
| |
@ -153,15 +160,21 @@ lazy_static! {
\[(.*?)\] # 2 - arguments to tts call \[(.*?)\] # 2 - arguments to tts call
(.*?) # 3 - field text (.*?) # 3 - field text
\[/anki:tts\] \[/anki:tts\]
").unwrap(); ",
)
.unwrap()
});
static ref PERSISTENT_HTML_SPACERS: Regex = Regex::new(r"(?i)<br\s*/?>|<div>|\n").unwrap(); static PERSISTENT_HTML_SPACERS: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)<br\s*/?>|<div>|\n").unwrap());
static ref TYPE_TAG: Regex = Regex::new(r"\[\[type:[^]]+\]\]").unwrap(); static TYPE_TAG: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\[\[type:[^]]+\]\]").unwrap());
pub(crate) static ref SOUND_TAG: Regex = Regex::new(r"\[sound:([^]]+)\]").unwrap(); pub(crate) static SOUND_TAG: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\[sound:([^]]+)\]").unwrap());
/// Files included in CSS with a leading underscore. /// Files included in CSS with a leading underscore.
static ref UNDERSCORED_CSS_IMPORTS: Regex = Regex::new( static UNDERSCORED_CSS_IMPORTS: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?xi) r#"(?xi)
(?:@import\s+ # import statement with a bare (?:@import\s+ # import statement with a bare
"(_[^"]*.css)" # double quoted "(_[^"]*.css)" # double quoted
@ -176,10 +189,14 @@ lazy_static! {
| # or | # or
(_.+) # unquoted filename (_.+) # unquoted filename
\s*\)) \s*\))
"#).unwrap(); "#,
)
.unwrap()
});
/// Strings, src and data attributes with a leading underscore. /// Strings, src and data attributes with a leading underscore.
static ref UNDERSCORED_REFERENCES: Regex = Regex::new( static UNDERSCORED_REFERENCES: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r#"(?x) r#"(?x)
\[sound:(_[^]]+)\] # a filename in an Anki sound tag \[sound:(_[^]]+)\] # a filename in an Anki sound tag
| # or | # or
@ -190,8 +207,10 @@ lazy_static! {
\b(?:src|data) # a 'src' or 'data' attribute \b(?:src|data) # a 'src' or 'data' attribute
= # followed by = # followed by
(_[^ >]+) # an unquoted value (_[^ >]+) # an unquoted value
"#).unwrap(); "#,
} )
.unwrap()
});
pub fn is_html(text: impl AsRef<str>) -> bool { pub fn is_html(text: impl AsRef<str>) -> bool {
HTML.is_match(text.as_ref()) HTML.is_match(text.as_ref())
@ -446,16 +465,16 @@ pub(crate) fn without_combining(s: &str) -> Cow<str> {
/// Check if string contains an unescaped wildcard. /// Check if string contains an unescaped wildcard.
pub(crate) fn is_glob(txt: &str) -> bool { pub(crate) fn is_glob(txt: &str) -> bool {
// even number of \s followed by a wildcard // even number of \s followed by a wildcard
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| {
static ref RE: Regex = Regex::new( Regex::new(
r"(?x) r"(?x)
(?:^|[^\\]) # not a backslash (?:^|[^\\]) # not a backslash
(?:\\\\)* # even number of backslashes (?:\\\\)* # even number of backslashes
[*_] # wildcard [*_] # wildcard
" ",
) )
.unwrap(); .unwrap()
} });
RE.is_match(txt) RE.is_match(txt)
} }
@ -467,9 +486,7 @@ pub(crate) fn to_re(txt: &str) -> Cow<str> {
/// Convert Anki style to RegEx using the provided wildcard. /// Convert Anki style to RegEx using the provided wildcard.
pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> { pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\?.").unwrap());
static ref RE: Regex = Regex::new(r"\\?.").unwrap();
}
RE.replace_all(txt, |caps: &Captures| { RE.replace_all(txt, |caps: &Captures| {
let s = &caps[0]; let s = &caps[0];
match s { match s {
@ -485,9 +502,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
/// Convert to SQL respecting Anki wildcards. /// Convert to SQL respecting Anki wildcards.
pub(crate) fn to_sql(txt: &str) -> Cow<str> { pub(crate) fn to_sql(txt: &str) -> Cow<str> {
// escape sequences and unescaped special characters which need conversion // escape sequences and unescaped special characters which need conversion
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\[\\*]|[*%]").unwrap());
static ref RE: Regex = Regex::new(r"\\[\\*]|[*%]").unwrap();
}
RE.replace_all(txt, |caps: &Captures| { RE.replace_all(txt, |caps: &Captures| {
let s = &caps[0]; let s = &caps[0];
match s { match s {
@ -502,17 +517,13 @@ pub(crate) fn to_sql(txt: &str) -> Cow<str> {
/// Unescape everything. /// Unescape everything.
pub(crate) fn to_text(txt: &str) -> Cow<str> { pub(crate) fn to_text(txt: &str) -> Cow<str> {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\(.)").unwrap());
static ref RE: Regex = Regex::new(r"\\(.)").unwrap();
}
RE.replace_all(txt, "$1") RE.replace_all(txt, "$1")
} }
/// Escape Anki wildcards and the backslash for escaping them: \*_ /// Escape Anki wildcards and the backslash for escaping them: \*_
pub(crate) fn escape_anki_wildcards(txt: &str) -> String { pub(crate) fn escape_anki_wildcards(txt: &str) -> String {
lazy_static! { static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\\*_]").unwrap());
static ref RE: Regex = Regex::new(r"[\\*_]").unwrap();
}
RE.replace_all(txt, r"\$0").into() RE.replace_all(txt, r"\$0").into()
} }
@ -545,9 +556,8 @@ pub(crate) fn glob_matcher(search: &str) -> impl Fn(&str) -> bool + '_ {
} }
} }
lazy_static! { pub(crate) static REMOTE_FILENAME: LazyLock<Regex> =
pub(crate) static ref REMOTE_FILENAME: Regex = Regex::new("(?i)^https?://").unwrap(); LazyLock::new(|| Regex::new("(?i)^https?://").unwrap());
}
/// https://url.spec.whatwg.org/#fragment-percent-encode-set /// https://url.spec.whatwg.org/#fragment-percent-encode-set
const FRAGMENT_QUERY_UNION: &AsciiSet = &CONTROLS const FRAGMENT_QUERY_UNION: &AsciiSet = &CONTROLS

View file

@ -2,9 +2,9 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::borrow::Cow; use std::borrow::Cow;
use std::sync::LazyLock;
use difflib::sequencematcher::SequenceMatcher; use difflib::sequencematcher::SequenceMatcher;
use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use unic_ucd_category::GeneralCategory; use unic_ucd_category::GeneralCategory;
@ -12,7 +12,7 @@ use crate::card_rendering::strip_av_tags;
use crate::text::normalize_to_nfkd; use crate::text::normalize_to_nfkd;
use crate::text::strip_html; use crate::text::strip_html;
static LINEBREAKS: Lazy<Regex> = Lazy::new(|| { static LINEBREAKS: LazyLock<Regex> = LazyLock::new(|| {
Regex::new( Regex::new(
r"(?six) r"(?six)
( (

View file

@ -2,8 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env; use std::env;
use std::sync::LazyLock;
use lazy_static::lazy_static;
pub fn version() -> &'static str { pub fn version() -> &'static str {
include_str!("../../.version").trim() include_str!("../../.version").trim()
@ -14,25 +13,25 @@ pub fn buildhash() -> &'static str {
} }
pub(crate) fn sync_client_version() -> &'static str { pub(crate) fn sync_client_version() -> &'static str {
lazy_static! { static VER: LazyLock<String> = LazyLock::new(|| {
static ref VER: String = format!( format!(
"anki,{version} ({buildhash}),{platform}", "anki,{version} ({buildhash}),{platform}",
version = version(), version = version(),
buildhash = buildhash(), buildhash = buildhash(),
platform = env::var("PLATFORM").unwrap_or_else(|_| env::consts::OS.to_string()) platform = env::var("PLATFORM").unwrap_or_else(|_| env::consts::OS.to_string())
); )
} });
&VER &VER
} }
pub(crate) fn sync_client_version_short() -> &'static str { pub(crate) fn sync_client_version_short() -> &'static str {
lazy_static! { static VER: LazyLock<String> = LazyLock::new(|| {
static ref VER: String = format!( format!(
"{version},{buildhash},{platform}", "{version},{buildhash},{platform}",
version = version(), version = version(),
buildhash = buildhash(), buildhash = buildhash(),
platform = env::consts::OS platform = env::consts::OS
); )
} });
&VER &VER
} }

View file

@ -12,6 +12,5 @@ anki_io.workspace = true
anki_process.workspace = true anki_process.workspace = true
anyhow.workspace = true anyhow.workspace = true
camino.workspace = true camino.workspace = true
once_cell.workspace = true
walkdir.workspace = true walkdir.workspace = true
which.workspace = true which.workspace = true

View file

@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::cell::LazyCell;
use std::collections::HashSet; use std::collections::HashSet;
use std::env; use std::env;
use std::fs; use std::fs;
@ -16,7 +17,6 @@ use anki_process::CommandExt;
use anyhow::Context; use anyhow::Context;
use anyhow::Result; use anyhow::Result;
use camino::Utf8Path; use camino::Utf8Path;
use once_cell::unsync::Lazy;
use walkdir::WalkDir; use walkdir::WalkDir;
const NONSTANDARD_HEADER: &[&str] = &[ const NONSTANDARD_HEADER: &[&str] = &[
@ -63,7 +63,7 @@ fn main() -> Result<()> {
struct LintContext { struct LintContext {
want_fix: bool, want_fix: bool,
unstaged_changes: Lazy<()>, unstaged_changes: LazyCell<()>,
found_problems: bool, found_problems: bool,
nonstandard_headers: HashSet<&'static Utf8Path>, nonstandard_headers: HashSet<&'static Utf8Path>,
} }
@ -72,7 +72,7 @@ impl LintContext {
pub fn new(want_fix: bool) -> Self { pub fn new(want_fix: bool) -> Self {
Self { Self {
want_fix, want_fix,
unstaged_changes: Lazy::new(check_for_unstaged_changes), unstaged_changes: LazyCell::new(check_for_unstaged_changes),
found_problems: false, found_problems: false,
nonstandard_headers: NONSTANDARD_HEADER.iter().map(Utf8Path::new).collect(), nonstandard_headers: NONSTANDARD_HEADER.iter().map(Utf8Path::new).collect(),
} }
@ -113,7 +113,7 @@ impl LintContext {
let missing = !head.contains("Ankitects Pty Ltd and contributors"); let missing = !head.contains("Ankitects Pty Ltd and contributors");
if missing { if missing {
if self.want_fix { if self.want_fix {
Lazy::force(&self.unstaged_changes); LazyCell::force(&self.unstaged_changes);
fix_copyright(path)?; fix_copyright(path)?;
} else { } else {
println!("missing standard copyright header: {:?}", path); println!("missing standard copyright header: {:?}", path);