lazy_static → once_cell → stabilized versions (#3447)
* Anki: Replace lazy_static with once_cell

  Unify to once_cell, lazy_static's replacement; the latter is unmaintained.

* Anki: Replace once_cell with the stabilized LazyCell / LazyLock as far as possible

  Stable since Rust 1.80: https://github.com/rust-lang/rust/issues/109736
  and https://github.com/rust-lang/rust/pull/98165

  Non-thread-safe Lazy → std::cell::LazyCell
  https://doc.rust-lang.org/nightly/std/cell/struct.LazyCell.html

  Thread-safe SyncLazy → std::sync::LazyLock
  https://doc.rust-lang.org/nightly/std/sync/struct.LazyLock.html

  The compiler accepted LazyCell only in minilints. The final use in
  rslib/src/log.rs couldn't be replaced, since get_or_try_init has not yet
  been stabilized: https://github.com/rust-lang/rust/issues/109737

* Declare correct MSRV (dae)

  Some of our deps require newer Rust versions, so the old value was
  misleading. Updating the MSRV also allows us to use .inspect() on Option
  now.
parent e2124cd790
commit d9969a9f4f
39 changed files with 200 additions and 220 deletions
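The lazy_static → LazyLock conversion applied file by file below is mechanical. A minimal before/after sketch (the WORD regex is illustrative, not taken from the codebase):

    use std::sync::LazyLock;
    use regex::Regex;

    // Before, with the unmaintained lazy_static crate:
    //
    //     lazy_static::lazy_static! {
    //         static ref WORD: Regex = Regex::new(r"\w+").unwrap();
    //     }

    // After, with std only (stable since Rust 1.80). The closure runs once,
    // on first dereference, synchronized across threads.
    static WORD: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\w+").unwrap());

    fn main() {
        assert!(WORD.is_match("hello"));
    }

The holdout mentioned in the message is fallible initialization: std's once/lazy types have no stable get_or_try_init yet, which is why rslib/src/log.rs stays on once_cell. A sketch of that pattern, with an illustrative file name:

    use once_cell::sync::OnceCell;

    fn motd() -> Result<&'static String, std::io::Error> {
        static MOTD: OnceCell<String> = OnceCell::new();
        // The value is cached only if the closure succeeds, so a failed
        // read can be retried on a later call.
        MOTD.get_or_try_init(|| std::fs::read_to_string("motd.txt"))
    }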
Cargo.lock (generated): 6 changes
@@ -134,7 +134,6 @@ dependencies = [
 "id_tree",
 "inflections",
 "itertools 0.13.0",
-"lazy_static",
 "nom",
 "num_cpus",
 "num_enum",
@@ -249,7 +248,6 @@ dependencies = [
 "camino",
 "inflections",
 "itertools 0.13.0",
-"once_cell",
 "prost-reflect",
 "prost-types",
 "regex",
@@ -1891,7 +1889,6 @@ dependencies = [
 "clap",
 "fluent-syntax",
 "itertools 0.13.0",
-"lazy_static",
 "regex",
 "serde_json",
 "snafu",
@@ -3167,7 +3164,6 @@ dependencies = [
 "anki",
 "futures",
 "itertools 0.13.0",
-"lazy_static",
 "linkcheck",
 "regex",
 "reqwest 0.12.7",
@@ -3445,7 +3441,6 @@ dependencies = [
 "anki_process",
 "anyhow",
 "camino",
-"once_cell",
 "walkdir",
 "which",
 ]
@@ -3609,7 +3604,6 @@ dependencies = [
 "dunce",
 "globset",
 "itertools 0.13.0",
-"lazy_static",
 "maplit",
 "num_cpus",
 "walkdir",
@@ -3,7 +3,7 @@ version = "0.0.0"
 authors = ["Ankitects Pty Ltd and contributors <https://help.ankiweb.net>"]
 edition = "2021"
 license = "AGPL-3.0-or-later"
-rust-version = "1.65"
+rust-version = "1.80"

 [workspace]
 members = [
@@ -14,7 +14,6 @@ camino.workspace = true
 dunce.workspace = true
 globset.workspace = true
 itertools.workspace = true
-lazy_static.workspace = true
 maplit.workspace = true
 num_cpus.workspace = true
 walkdir.workspace = true
@@ -3,6 +3,7 @@

 use std::collections::HashMap;
 use std::fmt::Display;
+use std::sync::LazyLock;

 use camino::Utf8PathBuf;

@@ -118,9 +119,7 @@ pub struct Glob {
     pub exclude: Option<String>,
 }

-lazy_static::lazy_static! {
-    static ref CACHED_FILES: Vec<Utf8PathBuf> = cache_files();
-}
+static CACHED_FILES: LazyLock<Vec<Utf8PathBuf>> = LazyLock::new(cache_files);

 /// Walking the source tree once instead of for each glob yields ~4x speed
 /// improvements.
@@ -16,7 +16,6 @@ camino.workspace = true
 clap.workspace = true
 fluent-syntax.workspace = true
 itertools.workspace = true
-lazy_static.workspace = true
 regex.workspace = true
 serde_json.workspace = true
 snafu.workspace = true
@@ -6,6 +6,7 @@ use std::fs;
 use std::io::BufReader;
 use std::iter::FromIterator;
 use std::path::PathBuf;
+use std::sync::LazyLock;

 use anki_io::create_file;
 use anyhow::Context;
@@ -14,7 +15,6 @@ use clap::Args;
 use fluent_syntax::ast;
 use fluent_syntax::ast::Resource;
 use fluent_syntax::parser;
-use lazy_static::lazy_static;
 use regex::Regex;
 use walkdir::DirEntry;
 use walkdir::WalkDir;
@@ -144,9 +144,8 @@ fn extract_nested_messages_and_terms(
     ftl_roots: &[impl AsRef<str>],
     used_ftls: &mut HashSet<String>,
 ) {
-    lazy_static! {
-        static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap();
-    }
+    static REFERENCE: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap());
     for_files_with_ending(ftl_roots, ".ftl", |entry| {
         let source = fs::read_to_string(entry.path()).expect("file not readable");
         for caps in REFERENCE.captures_iter(&source) {
@@ -198,11 +197,12 @@ fn entry_use_check(used_ftls: &HashSet<String>) -> impl Fn(&ast::Entry<&str>) ->
 }

 fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
-    lazy_static! {
-        static ref SNAKECASE_TR: Regex = Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap();
-        static ref CAMELCASE_TR: Regex = Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap();
-        static ref DESIGNER_STYLE_TR: Regex = Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap();
-    }
+    static SNAKECASE_TR: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap());
+    static CAMELCASE_TR: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap());
+    static DESIGNER_STYLE_TR: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap());

     let file_name = entry.file_name().to_str().expect("non-unicode filename");

@@ -69,7 +69,6 @@ htmlescape.workspace = true
 hyper.workspace = true
 id_tree.workspace = true
 itertools.workspace = true
-lazy_static.workspace = true
 nom.workspace = true
 num_cpus.workspace = true
 num_enum.workspace = true
@@ -11,7 +11,6 @@ rust-version.workspace = true
 anki.workspace = true
 futures.workspace = true
 itertools.workspace = true
-lazy_static.workspace = true
 linkcheck.workspace = true
 regex.workspace = true
 reqwest.workspace = true
@@ -6,6 +6,7 @@
 use std::borrow::Cow;
 use std::env;
 use std::iter;
+use std::sync::LazyLock;
 use std::time::Duration;

 use anki::links::help_page_link_suffix;
@@ -13,7 +14,6 @@ use anki::links::help_page_to_link;
 use anki::links::HelpPage;
 use futures::StreamExt;
 use itertools::Itertools;
-use lazy_static::lazy_static;
 use linkcheck::validation::check_web;
 use linkcheck::validation::Context;
 use linkcheck::validation::Reason;
@@ -70,9 +70,8 @@ impl From<&'static str> for CheckableUrl {
 }

 fn ts_help_pages() -> impl Iterator<Item = &'static str> {
-    lazy_static! {
-        static ref QUOTED_URL: Regex = Regex::new("\"(http.+)\"").unwrap();
-    }
+    static QUOTED_URL: LazyLock<Regex> = LazyLock::new(|| Regex::new("\"(http.+)\"").unwrap());

     QUOTED_URL
         .captures_iter(include_str!("../../../ts/lib/tslib/help-page.ts"))
         .map(|caps| caps.get(1).unwrap().as_str())
@@ -15,7 +15,6 @@ anyhow.workspace = true
 camino.workspace = true
 inflections.workspace = true
 itertools.workspace = true
-once_cell.workspace = true
 prost-reflect.workspace = true
 prost-types.workspace = true
 regex.workspace = true
@@ -7,6 +7,7 @@
 use std::collections::HashMap;
 use std::env;
 use std::path::PathBuf;
+use std::sync::LazyLock;

 use anki_io::read_to_string;
 use anki_io::write_file_if_changed;
@@ -16,7 +17,6 @@ use camino::Utf8Path;
 use inflections::Inflect;
 use itertools::Either;
 use itertools::Itertools;
-use once_cell::sync::Lazy;
 use prost_reflect::DescriptorPool;
 use prost_reflect::MessageDescriptor;
 use prost_reflect::MethodDescriptor;
@@ -238,8 +238,8 @@ pub fn add_must_use_annotations_to_file<E>(path: &Utf8Path, is_empty: E) -> Resu
 where
     E: Fn(&Utf8Path, &str) -> bool,
 {
-    static MESSAGE_OR_ENUM_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap());
+    static MESSAGE_OR_ENUM_RE: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap());
     let contents = read_to_string(path)?;
     let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| {
         let is_enum = caps.get(1).unwrap().as_str() == "enum";
@@ -5,6 +5,7 @@ use std::collections::HashMap;
 use std::mem::size_of;
 use std::sync::atomic::AtomicI32;
 use std::sync::atomic::Ordering;
+use std::sync::LazyLock;
 use std::sync::Mutex;

 use anki_proto::ankidroid::sql_value::Data;
@@ -16,7 +17,6 @@ use itertools::FoldWhile;
 use itertools::FoldWhile::Continue;
 use itertools::FoldWhile::Done;
 use itertools::Itertools;
-use lazy_static::lazy_static;
 use rusqlite::ToSql;
 use serde::Deserialize;

@@ -110,10 +110,8 @@ fn select_slice_of_size<'a>(

 type SequenceNumber = i32;

-lazy_static! {
-    static ref HASHMAP: Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>> =
-        Mutex::new(HashMap::new());
-}
+static HASHMAP: LazyLock<Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>>> =
+    LazyLock::new(|| Mutex::new(HashMap::new()));

 pub(crate) fn flush_single_result(col: &Collection, sequence_number: i32) {
     HASHMAP
@@ -244,10 +242,9 @@ pub(crate) fn next_sequence_number() -> i32 {
     SEQUENCE_NUMBER.fetch_add(1, Ordering::SeqCst)
 }

-lazy_static! {
-    // same as we get from io.requery.android.database.CursorWindow.sCursorWindowSize
-    static ref DB_COMMAND_PAGE_SIZE: Mutex<usize> = Mutex::new(1024 * 1024 * 2);
-}
+// same as we get from
+// io.requery.android.database.CursorWindow.sCursorWindowSize
+static DB_COMMAND_PAGE_SIZE: LazyLock<Mutex<usize>> = LazyLock::new(|| Mutex::new(1024 * 1024 * 2));

 pub(crate) fn set_max_page_size(size: usize) {
     let mut state = DB_COMMAND_PAGE_SIZE.lock().expect("Could not lock mutex");
@@ -1,7 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use lazy_static::lazy_static;
+use std::sync::LazyLock;
+
 use regex::Regex;
 use reqwest::Client;
 use serde;
@@ -31,11 +32,9 @@ pub async fn ankihub_login<S: Into<String>>(
     client: Client,
 ) -> Result<LoginResponse> {
     let client = HttpAnkiHubClient::new("", client);
-    lazy_static! {
-        static ref EMAIL_RE: Regex =
-            Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$")
-                .unwrap();
-    }
+    static EMAIL_RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap()
+    });
     let mut request = LoginRequest {
         username: None,
         email: None,
@@ -19,10 +19,10 @@ use std::ops::Deref;
 use std::result;
 use std::sync::Arc;
 use std::sync::Mutex;
+use std::sync::OnceLock;
 use std::thread::JoinHandle;

 use futures::future::AbortHandle;
-use once_cell::sync::OnceCell;
 use prost::Message;
 use reqwest::Client;
 use tokio::runtime;
@@ -53,7 +53,7 @@ pub struct BackendInner {
     server: bool,
     sync_abort: Mutex<Option<AbortHandle>>,
     progress_state: Arc<Mutex<ProgressState>>,
-    runtime: OnceCell<Runtime>,
+    runtime: OnceLock<Runtime>,
     state: Mutex<BackendState>,
     backup_task: Mutex<Option<JoinHandle<Result<()>>>>,
     media_sync_task: Mutex<Option<JoinHandle<Result<()>>>>,
@@ -88,7 +88,7 @@ impl Backend {
                 want_abort: false,
                 last_progress: None,
             })),
-            runtime: OnceCell::new(),
+            runtime: OnceLock::new(),
             state: Mutex::new(BackendState::default()),
             backup_task: Mutex::new(None),
             media_sync_task: Mutex::new(None),
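The backend hunks above also swap once_cell's OnceCell for std's OnceLock: set-once storage whose initializer is supplied at the call site instead of being attached to the static. A minimal sketch (names illustrative):

    use std::sync::OnceLock;

    fn platform_label() -> &'static str {
        static LABEL: OnceLock<String> = OnceLock::new();
        // get_or_init runs the closure at most once; later calls return
        // the cached value.
        LABEL.get_or_init(|| format!("built for {}", std::env::consts::OS))
    }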
@@ -5,11 +5,11 @@ use std::borrow::Cow;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fmt::Write;
+use std::sync::LazyLock;

 use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
 use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
 use htmlescape::encode_attribute;
-use lazy_static::lazy_static;
 use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::bytes::complete::take_while;
@@ -24,16 +24,16 @@ use crate::latex::contains_latex;
 use crate::template::RenderContext;
 use crate::text::strip_html_preserving_entities;

-lazy_static! {
-    static ref MATHJAX: Regex = Regex::new(
+static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r"(?xsi)
             (\\[(\[])       # 1 = mathjax opening tag
             (.*?)           # 2 = inner content
             (\\[])])        # 3 = mathjax closing tag
-            "
+            ",
     )
-    .unwrap();
-}
+    .unwrap()
+});

 mod mathjax_caps {
     pub const OPENING_TAG: usize = 1;
@@ -6,10 +6,10 @@ use std::collections::HashMap;
 use std::fs::File;
 use std::io::Write;
 use std::sync::Arc;
+use std::sync::LazyLock;

 use anki_proto::import_export::ExportNoteCsvRequest;
 use itertools::Itertools;
-use lazy_static::lazy_static;
 use regex::Regex;

 use super::metadata::Delimiter;
@@ -156,23 +156,22 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
 }

 fn strip_redundant_sections(text: &str) -> Cow<str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r"(?isx)
             <style>.*?</style>          # style elements
             |
             \[\[type:[^]]+\]\]          # type replacements
-            "
+            ",
         )
-        .unwrap();
-    }
+        .unwrap()
+    });
     RE.replace_all(text.as_ref(), "")
 }

 fn strip_answer_side_question(text: &str) -> Cow<str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap();
-    }
+    static RE: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
     RE.replace_all(text.as_ref(), "")
 }

@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::borrow::Cow;
+use std::sync::LazyLock;

-use lazy_static::lazy_static;
 use regex::Captures;
 use regex::Regex;

@@ -11,26 +11,28 @@ use crate::cloze::expand_clozes_to_reveal_latex;
 use crate::media::files::sha1_of_data;
 use crate::text::strip_html;

-lazy_static! {
-    pub(crate) static ref LATEX: Regex = Regex::new(
+pub(crate) static LATEX: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r"(?xsi)
             \[latex\](.+?)\[/latex\]     # 1 - standard latex
             |
             \[\$\](.+?)\[/\$\]           # 2 - inline math
             |
             \[\$\$\](.+?)\[/\$\$\]       # 3 - math environment
-            "
+            ",
     )
-    .unwrap();
-    static ref LATEX_NEWLINES: Regex = Regex::new(
+    .unwrap()
+});
+static LATEX_NEWLINES: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?xi)
             <br( /)?>
             |
             <div>
-            "#
+            "#,
     )
-    .unwrap();
-}
+    .unwrap()
+});

 pub(crate) fn contains_latex(text: &str) -> bool {
     LATEX.is_match(text)
@@ -52,9 +52,7 @@ pub mod undo;
 pub mod version;

 use std::env;
+use std::sync::LazyLock;

-use lazy_static::lazy_static;
-
-lazy_static! {
-    pub(crate) static ref PYTHON_UNIT_TESTS: bool = env::var("ANKI_TEST_MODE").is_ok();
-}
+pub(crate) static PYTHON_UNIT_TESTS: LazyLock<bool> =
+    LazyLock::new(|| env::var("ANKI_TEST_MODE").is_ok());
@@ -6,11 +6,11 @@ use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fs;
 use std::io;
+use std::sync::LazyLock;

 use anki_i18n::without_unicode_isolation;
 use anki_io::write_file;
 use data_encoding::BASE64;
-use once_cell::sync::Lazy;
 use regex::Regex;
 use tracing::debug;
 use tracing::info;
@@ -459,7 +459,7 @@ impl MediaChecker<'_> {
     }

     fn maybe_extract_inline_image<'a>(&mut self, fname_decoded: &'a str) -> Result<Cow<'a, str>> {
-        static BASE64_IMG: Lazy<Regex> = Lazy::new(|| {
+        static BASE64_IMG: LazyLock<Regex> = LazyLock::new(|| {
             Regex::new("(?i)^data:image/(jpg|jpeg|png|gif|webp|avif);base64,(.+)$").unwrap()
         });

@@ -7,6 +7,7 @@ use std::io;
 use std::io::Read;
 use std::path::Path;
 use std::path::PathBuf;
+use std::sync::LazyLock;
 use std::time;

 use anki_io::create_dir;
@@ -15,7 +16,6 @@ use anki_io::write_file;
 use anki_io::FileIoError;
 use anki_io::FileIoSnafu;
 use anki_io::FileOp;
-use lazy_static::lazy_static;
 use regex::Regex;
 use sha1::Digest;
 use sha1::Sha1;
@@ -27,8 +27,8 @@ use unicode_normalization::UnicodeNormalization;
 use crate::prelude::*;
 use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH;

-lazy_static! {
-    static ref WINDOWS_DEVICE_NAME: Regex = Regex::new(
+static WINDOWS_DEVICE_NAME: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r"(?xi)
             # starting with one of the following names
             ^
@@ -39,30 +39,34 @@ lazy_static! {
             (
                 \. | $
             )
-            "
+            ",
     )
-    .unwrap();
-    static ref WINDOWS_TRAILING_CHAR: Regex = Regex::new(
+    .unwrap()
+});
+static WINDOWS_TRAILING_CHAR: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r"(?x)
             # filenames can't end with a space or period
             (
                 \x20 | \.
             )
             $
-            "
+            ",
     )
-    .unwrap();
-    pub(crate) static ref NONSYNCABLE_FILENAME: Regex = Regex::new(
+    .unwrap()
+});
+pub(crate) static NONSYNCABLE_FILENAME: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?xi)
             ^
             (:?
                 thumbs.db | .ds_store
             )
             $
-            "#
+            "#,
     )
-    .unwrap();
-}
+    .unwrap()
+});

 /// True if character may cause problems on one or more platforms.
 fn disallowed_char(char: char) -> bool {
@@ -4,9 +4,9 @@
 use std::borrow::Cow;
 use std::fmt::Write;
 use std::ops::Deref;
+use std::sync::LazyLock;

 use anki_i18n::without_unicode_isolation;
-use lazy_static::lazy_static;
 use regex::Captures;
 use regex::Match;
 use regex::Regex;
@@ -24,9 +24,7 @@ struct Template<'a> {
     front: bool,
 }

-lazy_static! {
-    static ref FIELD_REPLACEMENT: Regex = Regex::new(r"\{\{.+\}\}").unwrap();
-}
+static FIELD_REPLACEMENT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\{\{.+\}\}").unwrap());

 impl Collection {
     pub fn report_media_field_referencing_templates(&mut self, buf: &mut String) -> Result<()> {
@@ -20,6 +20,7 @@ use std::collections::HashMap;
 use std::collections::HashSet;
 use std::iter::FromIterator;
 use std::sync::Arc;
+use std::sync::LazyLock;

 pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind;
 pub use anki_proto::notetypes::notetype::config::CardRequirement;
@@ -33,7 +34,6 @@ pub use anki_proto::notetypes::Notetype as NotetypeProto;
 pub(crate) use cardgen::AlreadyGeneratedCardInfo;
 pub(crate) use cardgen::CardGenContext;
 pub use fields::NoteField;
-use lazy_static::lazy_static;
 pub use notetypechange::ChangeNotetypeInput;
 pub use notetypechange::NotetypeChangeInfo;
 use regex::Regex;
@@ -67,9 +67,9 @@ pub(crate) const DEFAULT_CSS: &str = include_str!("styling.css");
 pub(crate) const DEFAULT_CLOZE_CSS: &str = include_str!("cloze_styling.css");
 pub(crate) const DEFAULT_LATEX_HEADER: &str = include_str!("header.tex");
 pub(crate) const DEFAULT_LATEX_FOOTER: &str = r"\end{document}";
-lazy_static! {
-    /// New entries must be handled in render.rs/add_special_fields().
-    static ref SPECIAL_FIELDS: HashSet<&'static str> = HashSet::from_iter(vec![
+/// New entries must be handled in render.rs/add_special_fields().
+static SPECIAL_FIELDS: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
+    HashSet::from_iter(vec![
         "FrontSide",
         "Card",
         "CardFlag",
@@ -77,8 +77,8 @@ lazy_static! {
         "Subdeck",
         "Tags",
         "Type",
-    ]);
-}
+    ])
+});

 #[derive(Debug, PartialEq, Clone)]
 pub struct Notetype {
@@ -365,9 +365,8 @@ impl Notetype {
     }

     fn ensure_template_fronts_unique(&self) -> Result<(), CardTypeError> {
-        lazy_static! {
-            static ref CARD_TAG: Regex = Regex::new(r"\{\{\s*Card\s*\}\}").unwrap();
-        }
+        static CARD_TAG: LazyLock<Regex> =
+            LazyLock::new(|| Regex::new(r"\{\{\s*Card\s*\}\}").unwrap());

         let mut map = HashMap::new();
         for (index, card) in self.templates.iter().enumerate() {
@@ -118,12 +118,11 @@ impl CardQueues {

     /// Remove the head of the intraday learning queue, and update counts.
     pub(super) fn pop_intraday_learning(&mut self) -> Option<LearningQueueEntry> {
-        self.intraday_learning.pop_front().map(|head| {
+        self.intraday_learning.pop_front().inspect(|_head| {
             // FIXME:
             // under normal circumstances this should not go below 0, but currently
             // the Python unit tests answer learning cards before they're due
             self.counts.learning = self.counts.learning.saturating_sub(1);
-            head
         })
     }

@@ -21,7 +21,7 @@ pub(crate) enum MainQueueEntryKind {
 impl CardQueues {
     /// Remove the head of the main queue, and update counts.
     pub(super) fn pop_main(&mut self) -> Option<MainQueueEntry> {
-        self.main.pop_front().map(|head| {
+        self.main.pop_front().inspect(|head| {
             match head.kind {
                 MainQueueEntryKind::New => self.counts.new -= 1,
                 MainQueueEntryKind::Review => self.counts.review -= 1,
@@ -32,7 +32,6 @@ impl CardQueues {
                 self.counts.learning = self.counts.learning.saturating_sub(1)
             }
             };
-            head
         })
     }

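The two queue hunks above lean on the by-reference adapters available at the new MSRV: Option::inspect and Result::inspect_err run a closure for its side effect and pass the value through unchanged, so the map/map_err closures no longer have to return the head or the error themselves. A standalone sketch with illustrative values:

    fn main() {
        let popped = Some(3).inspect(|n| println!("saw {n}"));
        assert_eq!(popped, Some(3)); // value passes through unchanged

        let res: Result<(), &str> = Err("boom").inspect_err(|e| eprintln!("error: {e}"));
        assert!(res.is_err());
    }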
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::collections::HashMap;
+use std::sync::LazyLock;

-use lazy_static::lazy_static;
 use rand::distributions::Distribution;
 use rand::distributions::Uniform;
 use regex::Regex;
@@ -65,8 +65,8 @@ pub struct DueDateSpecifier {
 }

 pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r"(?x)^
             # a number
             (?P<min>\d+)
@@ -78,10 +78,10 @@ pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
             # optional exclamation mark
             (?P<bang>!)?
             $
-            "
+            ",
         )
-        .unwrap();
-    }
+        .unwrap()
+    });
     let caps = RE.captures(s).or_invalid(s)?;
     let min: u32 = caps.name("min").unwrap().as_str().parse()?;
     let max = if let Some(max) = caps.name("max") {
@@ -1,7 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use lazy_static::lazy_static;
+use std::sync::LazyLock;
+
 use nom::branch::alt;
 use nom::bytes::complete::escaped;
 use nom::bytes::complete::is_not;
@@ -621,9 +622,7 @@ fn parse_mid(s: &str) -> ParseResult<SearchNode> {
 /// ensure a list of ids contains only numbers and commas, returning unchanged
 /// if true used by nid: and cid:
 fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"^(\d+,)*\d+$").unwrap();
-    }
+    static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^(\d+,)*\d+$").unwrap());
     if RE.is_match(s) {
         Ok(s)
     } else {
@@ -700,9 +699,7 @@ fn unescape(txt: &str) -> ParseResult<String> {
         ))
     } else {
         Ok(if is_parser_escape(txt) {
-            lazy_static! {
-                static ref RE: Regex = Regex::new(r#"\\[\\":()-]"#).unwrap();
-            }
+            static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r#"\\[\\":()-]"#).unwrap());
             RE.replace_all(txt, |caps: &Captures| match &caps[0] {
                 r"\\" => r"\\",
                 "\\\"" => "\"",
@@ -722,17 +719,17 @@ fn unescape(txt: &str) -> ParseResult<String> {
 /// Return invalid escape sequence if any.
 fn invalid_escape_sequence(txt: &str) -> Option<String> {
     // odd number of \s not followed by an escapable character
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r#"(?x)
             (?:^|[^\\])         # not a backslash
             (?:\\\\)*           # even number of backslashes
             (\\                 # single backslash
             (?:[^\\":*_()-]|$)) # anything but an escapable char
-            "#
+            "#,
         )
-        .unwrap();
-    }
+        .unwrap()
+    });
     let caps = RE.captures(txt)?;

     Some(caps[1].to_string())
@@ -741,17 +738,17 @@ fn invalid_escape_sequence(txt: &str) -> Option<String> {
 /// Check string for escape sequences handled by the parser: ":()-
 fn is_parser_escape(txt: &str) -> bool {
     // odd number of \s followed by a char with special meaning to the parser
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r#"(?x)
             (?:^|[^\\])     # not a backslash
             (?:\\\\)*       # even number of backslashes
             \\              # single backslash
             [":()-]         # parser escape
-            "#
+            "#,
         )
-        .unwrap();
-    }
+        .unwrap()
+    });

     RE.is_match(txt)
 }
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::mem;
+use std::sync::LazyLock;

-use lazy_static::lazy_static;
 use regex::Regex;

 use crate::notetype::NotetypeId as NotetypeIdType;
@@ -109,9 +109,8 @@ fn maybe_quote(txt: &str) -> String {
 /// Checks for the reserved keywords "and" and "or", a prepended hyphen,
 /// whitespace and brackets.
 fn needs_quotation(txt: &str) -> bool {
-    lazy_static! {
-        static ref RE: Regex = Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap();
-    }
+    static RE: LazyLock<Regex> =
+        LazyLock::new(|| Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap());
     RE.is_match(txt)
 }

@@ -161,10 +161,9 @@ impl SqliteStorage {
             .prepare(include_str!("alloc_id.sql"))?
             .query_row([TimestampMillis::now()], |r| r.get(0))?;
         self.add_or_update_deck_with_existing_id(deck)
-            .map_err(|err| {
+            .inspect_err(|_err| {
                 // restore id of 0
                 deck.id.0 = 0;
-                err
             })
     }

@@ -4,9 +4,9 @@
 #![cfg(test)]

 use std::future::Future;
+use std::sync::LazyLock;

 use axum::http::StatusCode;
-use once_cell::sync::Lazy;
 use reqwest::Client;
 use reqwest::Url;
 use serde_json::json;
@@ -57,7 +57,7 @@ struct TestAuth {
     host_key: String,
 }

-static AUTH: Lazy<TestAuth> = Lazy::new(|| {
+static AUTH: LazyLock<TestAuth> = LazyLock::new(|| {
     if let Ok(auth) = std::env::var("TEST_AUTH") {
         let mut auth = auth.split(':');
         TestAuth {
@@ -93,7 +93,7 @@ where
         .unwrap();
     tokio::spawn(server_fut.instrument(Span::current()));
     // when not using ephemeral servers, tests need to be serialized
-    static LOCK: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(()));
+    static LOCK: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
     let _lock: MutexGuard<()>;
     // setup client to connect to it
     let endpoint = if let Ok(endpoint) = std::env::var("TEST_ENDPOINT") {
@@ -57,10 +57,9 @@ impl User {
         // Returning HTTP 400 will inform the client that a DB check+full sync
         // is required to fix the issue.
         op(col, state)
-            .map_err(|e| {
+            .inspect_err(|_e| {
                 self.col = None;
                 self.sync_state = None;
-                e
             })
             .or_bad_request("op failed in sync_state")
     }
@@ -77,9 +77,8 @@ impl ServerMediaDatabase {
             self.db.execute("commit", [])?;
             Ok(meta.last_usn)
         })
-        .map_err(|e| {
+        .inspect_err(|_e| {
             let _ = self.db.execute("rollback", []);
-            e
         })
     }

@@ -8,6 +8,7 @@ use std::any::Any;
 use std::env;
 use std::marker::PhantomData;
 use std::net::IpAddr;
+use std::sync::LazyLock;

 use async_trait::async_trait;
 use axum::body::Body;
@@ -19,7 +20,6 @@ use axum::RequestPartsExt;
 use axum_client_ip::SecureClientIp;
 use axum_extra::TypedHeader;
 use header_and_stream::SyncHeader;
-use once_cell::sync::Lazy;
 use serde::de::DeserializeOwned;
 use serde::Serialize;
 use serde_json::Error;
@@ -179,7 +179,7 @@ where
     }
 }

-pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy<usize> = Lazy::new(|| {
+pub static MAXIMUM_SYNC_PAYLOAD_BYTES: LazyLock<usize> = LazyLock::new(|| {
     env::var("MAX_SYNC_PAYLOAD_MEGS")
         .map(|v| v.parse().expect("invalid upload limit"))
         .unwrap_or(100)
@@ -189,5 +189,5 @@ pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy<usize> = Lazy::new(|| {
 /// Client ignores this when a non-AnkiWeb endpoint is configured. Controls the
 /// maximum size of a payload after decompression, which effectively limits the
 /// how large a collection file can be uploaded.
-pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: Lazy<u64> =
-    Lazy::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64);
+pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: LazyLock<u64> =
+    LazyLock::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64);
@@ -6,9 +6,9 @@ use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fmt::Write;
 use std::iter;
+use std::sync::LazyLock;

 use anki_i18n::I18n;
-use lazy_static::lazy_static;
 use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::bytes::complete::take_until;
@@ -546,18 +546,18 @@ fn append_str_to_nodes(nodes: &mut Vec<RenderedNode>, text: &str) {

 /// True if provided text contains only whitespace and/or empty BR/DIV tags.
 pub(crate) fn field_is_empty(text: &str) -> bool {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r"(?xsi)
             ^(?:
             [[:space:]]
             |
             </?(?:br|div)\ ?/?>
             )*$
-            "
+            ",
         )
-        .unwrap();
-    }
+        .unwrap()
+    });
     RE.is_match(text)
 }

@@ -2,9 +2,9 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::borrow::Cow;
+use std::sync::LazyLock;

 use blake3::Hasher;
-use lazy_static::lazy_static;
 use regex::Captures;
 use regex::Regex;

@@ -110,9 +110,7 @@ fn apply_filter(
 // Ruby filters
 //----------------------------------------

-lazy_static! {
-    static ref FURIGANA: Regex = Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap();
-}
+static FURIGANA: LazyLock<Regex> = LazyLock::new(|| Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap());

 /// Did furigana regex match a sound tag?
 fn captured_sound(caps: &Captures) -> bool {
@@ -2,8 +2,8 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::borrow::Cow;
+use std::sync::LazyLock;

-use lazy_static::lazy_static;
 use percent_encoding_iri::percent_decode_str;
 use percent_encoding_iri::utf8_percent_encode;
 use percent_encoding_iri::AsciiSet;
@@ -79,17 +79,18 @@ pub enum AvTag {
     },
 }

-lazy_static! {
-    static ref HTML: Regex = Regex::new(concat!(
+static HTML: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(concat!(
         "(?si)",
         // wrapped text
         r"(<!--.*?-->)|(<style.*?>.*?</style>)|(<script.*?>.*?</script>)",
         // html tags
         r"|(<.*?>)",
     ))
-    .unwrap();
-
-    static ref HTML_LINEBREAK_TAGS: Regex = Regex::new(
+    .unwrap()
+});
+static HTML_LINEBREAK_TAGS: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?xsi)
             </?
             (?:
@@ -99,10 +100,13 @@ lazy_static! {
             |output|p|pre|section|table|tfoot|ul|video
             )
             >
-            "#
-    ).unwrap();
+            "#,
+    )
+    .unwrap()
+});

-    pub static ref HTML_MEDIA_TAGS: Regex = Regex::new(
+pub static HTML_MEDIA_TAGS: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?xsi)
             # the start of the image, audio, or object tag
             <\b(?:img|audio|video|object)\b
@@ -141,11 +145,14 @@ lazy_static! {
             >
             )
             )
-            "#
-    ).unwrap();
+            "#,
+    )
+    .unwrap()
+});

 // videos are also in sound tags
-    static ref AV_TAGS: Regex = Regex::new(
+static AV_TAGS: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r"(?xs)
             \[sound:(.+?)\]     # 1 - the filename in a sound tag
             |
@@ -153,15 +160,21 @@ lazy_static! {
             \[(.*?)\]           # 2 - arguments to tts call
             (.*?)               # 3 - field text
             \[/anki:tts\]
-            ").unwrap();
+            ",
+    )
+    .unwrap()
+});

-    static ref PERSISTENT_HTML_SPACERS: Regex = Regex::new(r"(?i)<br\s*/?>|<div>|\n").unwrap();
+static PERSISTENT_HTML_SPACERS: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"(?i)<br\s*/?>|<div>|\n").unwrap());

-    static ref TYPE_TAG: Regex = Regex::new(r"\[\[type:[^]]+\]\]").unwrap();
-    pub(crate) static ref SOUND_TAG: Regex = Regex::new(r"\[sound:([^]]+)\]").unwrap();
+static TYPE_TAG: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\[\[type:[^]]+\]\]").unwrap());
+pub(crate) static SOUND_TAG: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"\[sound:([^]]+)\]").unwrap());

 /// Files included in CSS with a leading underscore.
-    static ref UNDERSCORED_CSS_IMPORTS: Regex = Regex::new(
+static UNDERSCORED_CSS_IMPORTS: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?xi)
             (?:@import\s+           # import statement with a bare
             "(_[^"]*.css)"          # double quoted
@@ -176,10 +189,14 @@ lazy_static! {
             |                       # or
             (_.+)                   # unquoted filename
             \s*\))
-            "#).unwrap();
+            "#,
+    )
+    .unwrap()
+});

 /// Strings, src and data attributes with a leading underscore.
-    static ref UNDERSCORED_REFERENCES: Regex = Regex::new(
+static UNDERSCORED_REFERENCES: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(
         r#"(?x)
             \[sound:(_[^]]+)\]      # a filename in an Anki sound tag
             |                       # or
@@ -190,8 +207,10 @@ lazy_static! {
             \b(?:src|data)          # a 'src' or 'data' attribute
             =                       # followed by
             (_[^ >]+)               # an unquoted value
-            "#).unwrap();
-}
+            "#,
+    )
+    .unwrap()
+});

 pub fn is_html(text: impl AsRef<str>) -> bool {
     HTML.is_match(text.as_ref())
@@ -446,16 +465,16 @@ pub(crate) fn without_combining(s: &str) -> Cow<str> {
 /// Check if string contains an unescaped wildcard.
 pub(crate) fn is_glob(txt: &str) -> bool {
     // even number of \s followed by a wildcard
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: LazyLock<Regex> = LazyLock::new(|| {
+        Regex::new(
             r"(?x)
             (?:^|[^\\])     # not a backslash
             (?:\\\\)*       # even number of backslashes
             [*_]            # wildcard
-            "
+            ",
         )
-        .unwrap();
-    }
+        .unwrap()
+    });

     RE.is_match(txt)
 }
@@ -467,9 +486,7 @@ pub(crate) fn to_re(txt: &str) -> Cow<str> {

 /// Convert Anki style to RegEx using the provided wildcard.
 pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"\\?.").unwrap();
-    }
+    static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\?.").unwrap());
     RE.replace_all(txt, |caps: &Captures| {
         let s = &caps[0];
         match s {
@@ -485,9 +502,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
 /// Convert to SQL respecting Anki wildcards.
 pub(crate) fn to_sql(txt: &str) -> Cow<str> {
     // escape sequences and unescaped special characters which need conversion
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"\\[\\*]|[*%]").unwrap();
-    }
+    static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\[\\*]|[*%]").unwrap());
     RE.replace_all(txt, |caps: &Captures| {
         let s = &caps[0];
         match s {
@@ -502,17 +517,13 @@ pub(crate) fn to_sql(txt: &str) -> Cow<str> {

 /// Unescape everything.
 pub(crate) fn to_text(txt: &str) -> Cow<str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"\\(.)").unwrap();
-    }
+    static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\(.)").unwrap());
     RE.replace_all(txt, "$1")
 }

 /// Escape Anki wildcards and the backslash for escaping them: \*_
 pub(crate) fn escape_anki_wildcards(txt: &str) -> String {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"[\\*_]").unwrap();
-    }
+    static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\\*_]").unwrap());
     RE.replace_all(txt, r"\$0").into()
 }

@@ -545,9 +556,8 @@ pub(crate) fn glob_matcher(search: &str) -> impl Fn(&str) -> bool + '_ {
     }
 }

-lazy_static! {
-    pub(crate) static ref REMOTE_FILENAME: Regex = Regex::new("(?i)^https?://").unwrap();
-}
+pub(crate) static REMOTE_FILENAME: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new("(?i)^https?://").unwrap());

 /// https://url.spec.whatwg.org/#fragment-percent-encode-set
 const FRAGMENT_QUERY_UNION: &AsciiSet = &CONTROLS
@@ -2,9 +2,9 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::borrow::Cow;
+use std::sync::LazyLock;

 use difflib::sequencematcher::SequenceMatcher;
-use once_cell::sync::Lazy;
 use regex::Regex;
 use unic_ucd_category::GeneralCategory;

@@ -12,7 +12,7 @@ use crate::card_rendering::strip_av_tags;
 use crate::text::normalize_to_nfkd;
 use crate::text::strip_html;

-static LINEBREAKS: Lazy<Regex> = Lazy::new(|| {
+static LINEBREAKS: LazyLock<Regex> = LazyLock::new(|| {
     Regex::new(
         r"(?six)
         (
@@ -2,8 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::env;
-
-use lazy_static::lazy_static;
+use std::sync::LazyLock;

 pub fn version() -> &'static str {
     include_str!("../../.version").trim()
@@ -14,25 +13,25 @@ pub fn buildhash() -> &'static str {
 }

 pub(crate) fn sync_client_version() -> &'static str {
-    lazy_static! {
-        static ref VER: String = format!(
+    static VER: LazyLock<String> = LazyLock::new(|| {
+        format!(
             "anki,{version} ({buildhash}),{platform}",
             version = version(),
             buildhash = buildhash(),
             platform = env::var("PLATFORM").unwrap_or_else(|_| env::consts::OS.to_string())
-        );
-    }
+        )
+    });
     &VER
 }

 pub(crate) fn sync_client_version_short() -> &'static str {
-    lazy_static! {
-        static ref VER: String = format!(
+    static VER: LazyLock<String> = LazyLock::new(|| {
+        format!(
             "{version},{buildhash},{platform}",
             version = version(),
             buildhash = buildhash(),
             platform = env::consts::OS
-        );
-    }
+        )
+    });
     &VER
 }
@@ -12,6 +12,5 @@ anki_io.workspace = true
 anki_process.workspace = true
 anyhow.workspace = true
 camino.workspace = true
-once_cell.workspace = true
 walkdir.workspace = true
 which.workspace = true
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use std::cell::LazyCell;
 use std::collections::HashSet;
 use std::env;
 use std::fs;
@@ -16,7 +17,6 @@ use anki_process::CommandExt;
 use anyhow::Context;
 use anyhow::Result;
 use camino::Utf8Path;
-use once_cell::unsync::Lazy;
 use walkdir::WalkDir;

 const NONSTANDARD_HEADER: &[&str] = &[
@@ -63,7 +63,7 @@ fn main() -> Result<()> {

 struct LintContext {
     want_fix: bool,
-    unstaged_changes: Lazy<()>,
+    unstaged_changes: LazyCell<()>,
     found_problems: bool,
     nonstandard_headers: HashSet<&'static Utf8Path>,
 }
@@ -72,7 +72,7 @@ impl LintContext {
     pub fn new(want_fix: bool) -> Self {
         Self {
             want_fix,
-            unstaged_changes: Lazy::new(check_for_unstaged_changes),
+            unstaged_changes: LazyCell::new(check_for_unstaged_changes),
             found_problems: false,
             nonstandard_headers: NONSTANDARD_HEADER.iter().map(Utf8Path::new).collect(),
         }
@@ -113,7 +113,7 @@ impl LintContext {
         let missing = !head.contains("Ankitects Pty Ltd and contributors");
         if missing {
             if self.want_fix {
-                Lazy::force(&self.unstaged_changes);
+                LazyCell::force(&self.unstaged_changes);
                 fix_copyright(path)?;
             } else {
                 println!("missing standard copyright header: {:?}", path);
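As the commit message notes, minilints is the one crate where the compiler accepted the single-threaded std::cell::LazyCell. Unlike LazyLock it is not Sync, which suits a per-instance field such as unstaged_changes above, but not a shared static. A minimal sketch (illustrative):

    use std::cell::LazyCell;

    fn main() {
        let expensive = LazyCell::new(|| {
            println!("computed once");
            6 * 7
        });
        assert_eq!(*expensive, 42); // first dereference runs the closure
        assert_eq!(*expensive, 42); // cached afterwards; no second print
    }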