Mirror of https://github.com/ankitects/anki.git
Fix clippy issues in Rust 1.72
parent 95f7d5a941
commit 408b48834b

17 changed files with 41 additions and 39 deletions

@@ -236,7 +236,7 @@ where
     E: Fn(&Utf8Path, &str) -> bool,
 {
     static MESSAGE_OR_ENUM_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r#"pub (struct|enum) ([[:alnum:]]+?)\s"#).unwrap());
+        Lazy::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap());
     let contents = read_to_string(path)?;
     let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| {
         let is_enum = caps.get(1).unwrap().as_str() == "enum";

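Most hunks in this commit are the same mechanical change: Rust 1.72's clippy warns when a raw string's # delimiters are unnecessary because the literal contains no double quote (the needless_raw_string_hashes lint, which one of the test hunks below also allows explicitly), so r#"…"# becomes r"…". A minimal sketch of the warning and its fix, using an illustrative pattern rather than one from this diff and assuming the regex crate is available:

    use regex::Regex;

    fn main() {
        // clippy 1.72 flags this as needless_raw_string_hashes: the pattern has
        // no embedded double quote, so the # delimiters add nothing.
        let before = Regex::new(r#"pub (struct|enum) (\w+)"#).unwrap();

        // The fix is purely syntactic; the compiled pattern is identical.
        let after = Regex::new(r"pub (struct|enum) (\w+)").unwrap();

        assert_eq!(before.as_str(), after.as_str());
    }
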
@@ -22,11 +22,11 @@ use crate::text::strip_html_preserving_entities;
 
 lazy_static! {
     static ref MATHJAX: Regex = Regex::new(
-        r#"(?xsi)
+        r"(?xsi)
            (\\[(\[]) # 1 = mathjax opening tag
            (.*?) # 2 = inner content
            (\\[])]) # 3 = mathjax closing tag
-            "#
+            "
     )
     .unwrap();
 }

@@ -135,12 +135,12 @@ impl Collection {
         &mut self,
         config: &mut DeckConfig,
     ) -> Result<()> {
-        let usn = Some(self.usn()?);
+        let usn = self.usn()?;
 
         if config.id.0 == 0 {
-            self.add_deck_config_inner(config, usn)
+            self.add_deck_config_inner(config, Some(usn))
         } else {
-            config.set_modified(usn.unwrap());
+            config.set_modified(usn);
             self.storage
                 .add_or_update_deck_config_with_existing_id(config)
         }

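This hunk is a small refactor rather than a literal-syntax change: instead of wrapping the USN in Some up front and unwrapping it again on the else branch, the plain value is kept and wrapped only at the one call that actually takes an Option. A rough sketch of that shape, with hypothetical stand-ins for the Anki types and methods:

    #[derive(Clone, Copy)]
    struct Usn(i32); // stand-in for Anki's update sequence number type

    fn add_config(_usn: Option<Usn>) {} // stand-in for add_deck_config_inner
    fn set_modified(_usn: Usn) {}       // stand-in for config.set_modified

    fn update_config(is_new: bool, usn: Usn) {
        // Before: let usn = Some(usn); ... add_config(usn) ... set_modified(usn.unwrap())
        // After: keep the bare value; wrap it only where an Option is required.
        if is_new {
            add_config(Some(usn));
        } else {
            set_modified(usn);
        }
    }

    fn main() {
        update_config(true, Usn(1));
        update_config(false, Usn(2));
    }
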
@@ -141,7 +141,7 @@ impl Collection {
 
     fn is_image_file(&mut self, path: &PathBuf) -> Result<bool> {
         let file_path = Path::new(&path);
-        let supported_extensions = vec![
+        let supported_extensions = [
             "jpg", "jpeg", "png", "tif", "tiff", "gif", "svg", "webp", "ico",
         ];
 

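The extension list here is only searched, never grown, so a fixed-size array avoids a pointless heap allocation; this is the kind of code clippy's useless_vec lint points at (the lint name is an inference, the commit message only says "clippy issues"). A simplified sketch of the same check:

    use std::path::Path;

    // Simplified version of the check above: the extension list is a plain
    // array because it is only ever searched, never modified.
    fn is_image_path(path: &Path) -> bool {
        let supported_extensions = ["jpg", "jpeg", "png", "tif", "tiff", "gif", "svg", "webp", "ico"];
        path.extension()
            .and_then(|ext| ext.to_str())
            .map(|ext| supported_extensions.contains(&ext.to_ascii_lowercase().as_str()))
            .unwrap_or(false)
    }

    fn main() {
        assert!(is_image_path(Path::new("cat.PNG")));
        assert!(!is_image_path(Path::new("notes.txt")));
    }
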
@@ -6,13 +6,13 @@ mod colpkg;
 mod media;
 mod meta;
 
-pub(self) use anki_proto::import_export::media_entries::MediaEntry;
-pub(self) use anki_proto::import_export::MediaEntries;
+use anki_proto::import_export::media_entries::MediaEntry;
+use anki_proto::import_export::MediaEntries;
 pub(crate) use apkg::NoteMeta;
 pub(crate) use colpkg::export::export_colpkg_from_data;
 pub use colpkg::import::import_colpkg;
 pub use media::MediaIter;
 pub use media::MediaIterEntry;
 pub use media::MediaIterError;
-pub(self) use meta::Meta;
-pub(self) use meta::Version;
+use meta::Meta;
+use meta::Version;

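pub(self) is just a verbose way of writing the default private visibility, so dropping it changes nothing about what the module exports; this matches clippy's needless_pub_self lint, although the lint name is an inference rather than something stated in the commit. A tiny self-contained sketch of the equivalence:

    mod inner {
        // pub(self) means "visible in the current module only", which is
        // exactly what leaving the visibility off already means.
        pub(self) fn helper_a() -> u32 {
            1
        }
        fn helper_b() -> u32 {
            1
        }

        pub fn sum() -> u32 {
            helper_a() + helper_b()
        }
    }

    fn main() {
        assert_eq!(inner::sum(), 2);
    }
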
@@ -198,7 +198,7 @@ impl NoteContext {
             .max()
             .unwrap_or_default();
         let deck_ids = col.storage.all_decks_of_search_notes()?;
-        let deck_names = HashMap::from_iter(col.storage.get_all_deck_names()?.into_iter());
+        let deck_names = HashMap::from_iter(col.storage.get_all_deck_names()?);
 
         Ok(Self {
             with_html: request.with_html,

@@ -236,10 +236,10 @@ impl NoteContext {
         self.with_guid
             .then(|| Cow::from(note.guid.as_bytes()))
             .into_iter()
-            .chain(self.notetype_name(note).into_iter())
-            .chain(self.deck_name(note).into_iter())
+            .chain(self.notetype_name(note))
+            .chain(self.deck_name(note))
             .chain(self.note_fields(note))
-            .chain(self.tags(note).into_iter())
+            .chain(self.tags(note))
     }
 
     fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {

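The .into_iter() calls removed in these hunks are no-ops: HashMap::from_iter, Iterator::chain and Iterator::zip all accept any IntoIterator, so the collection or Option can be passed directly. This looks like clippy's useless_conversion lint (again an inference from the diff). A small sketch under that assumption:

    use std::collections::HashMap;

    fn main() {
        let names = vec![(1_i64, "Default".to_string()), (2, "Filtered".to_string())];

        // from_iter already takes any IntoIterator, so writing
        // HashMap::from_iter(names.into_iter()) says nothing extra.
        let by_id: HashMap<i64, String> = HashMap::from_iter(names);
        assert_eq!(by_id.len(), 2);

        // The same applies to adapters: an Option or a Vec can be chained directly.
        // The leading .into_iter() is still needed, because chain is called on an
        // Iterator, not on the Option itself.
        let first = Some("guid");
        let rest = vec!["front", "back"];
        let row: Vec<&str> = first.into_iter().chain(rest).collect();
        assert_eq!(row, ["guid", "front", "back"]);
    }
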
@@ -558,7 +558,7 @@ impl ForeignNote {
         note.tags.extend(extra_tags.into_iter().cloned());
         note.fields_mut()
             .iter_mut()
-            .zip(self.fields.into_iter())
+            .zip(self.fields)
             .for_each(|(field, new)| {
                 if let Some(s) = new {
                     *field = s;

@@ -13,13 +13,13 @@ use crate::text::strip_html;
 
 lazy_static! {
     pub(crate) static ref LATEX: Regex = Regex::new(
-        r#"(?xsi)
+        r"(?xsi)
            \[latex\](.+?)\[/latex\] # 1 - standard latex
            |
            \[\$\](.+?)\[/\$\] # 2 - inline math
            |
            \[\$\$\](.+?)\[/\$\$\] # 3 - math environment
-            "#
+            "
     )
     .unwrap();
     static ref LATEX_NEWLINES: Regex = Regex::new(

@@ -29,7 +29,7 @@ use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH;
 
 lazy_static! {
     static ref WINDOWS_DEVICE_NAME: Regex = Regex::new(
-        r#"(?xi)
+        r"(?xi)
            # starting with one of the following names
            ^
            (

@@ -39,17 +39,17 @@ lazy_static! {
            (
                \. | $
            )
-            "#
+            "
     )
     .unwrap();
     static ref WINDOWS_TRAILING_CHAR: Regex = Regex::new(
-        r#"(?x)
+        r"(?x)
            # filenames can't end with a space or period
            (
                \x20 | \.
            )
            $
-            "#
+            "
     )
     .unwrap();
     pub(crate) static ref NONSYNCABLE_FILENAME: Regex = Regex::new(

@@ -164,7 +164,7 @@ fn default_template_map(
     new_templates
         .iter_mut()
         .filter(|o| o.is_none())
-        .zip(remaining_templates.into_iter())
+        .zip(remaining_templates)
         .for_each(|(template, old_idx)| *template = Some(*old_idx));
 
     Some(new_templates)

@@ -202,7 +202,7 @@ fn default_field_map(current_notetype: &Notetype, new_notetype: &Notetype) -> Ve
     new_fields
         .iter_mut()
         .filter(|o| o.is_none())
-        .zip(remaining_fields.into_iter())
+        .zip(remaining_fields)
         .for_each(|(field, old_idx)| *field = Some(*old_idx));
 
     new_fields

@@ -67,7 +67,7 @@ pub struct DueDateSpecifier {
 pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
     lazy_static! {
         static ref RE: Regex = Regex::new(
-            r#"(?x)^
+            r"(?x)^
                # a number
                (?P<min>\d+)
                # an optional hyphen and another number

@@ -78,7 +78,7 @@ pub fn parse_due_date_str(s: &str) -> Result<DueDateSpecifier> {
                # optional exclamation mark
                (?P<bang>!)?
                $
-            "#
+            "
         )
         .unwrap();
     }

@@ -824,8 +824,8 @@ mod test {
 
         // parser doesn't unescape unescape \*_
         assert_eq!(
-            parse(r#"\\\*\_"#)?,
-            vec![Search(UnqualifiedText(r#"\\\*\_"#.into())),]
+            parse(r"\\\*\_")?,
+            vec![Search(UnqualifiedText(r"\\\*\_".into())),]
         );
 
         // escaping parentheses is optional (only) inside quotes

@@ -987,7 +987,7 @@ mod test {
         );
         assert_eq!(s(ctx, "te%st").1, vec![r"%te\%st%".to_string()]);
         // user should be able to escape wildcards
-        assert_eq!(s(ctx, r#"te\*s\_t"#).1, vec!["%te*s\\_t%".to_string()]);
+        assert_eq!(s(ctx, r"te\*s\_t").1, vec!["%te*s\\_t%".to_string()]);
 
         // field search
         assert_eq!(

@@ -1255,6 +1255,7 @@ c.odue != 0 then c.odue else c.due end) != {days}) or (c.queue in (1,4) and
         );
     }
 
+    #[allow(clippy::single_range_in_vec_init)]
     #[test]
     fn ranges() {
         assert_eq!([1, 2, 3].collect_ranges(), [1..4]);

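The allow added here is needed because the test genuinely wants a one-element collection holding a range: [1..4] is the expected output of collect_ranges(). The lint exists because that shape is usually an accident, as in this illustrative sketch (values invented for the example):

    fn main() {
        // What clippy::single_range_in_vec_init guards against: this is a
        // single-element array containing the range 0..3, not the values 0, 1, 2.
        let probably_unintended = [0..3];
        assert_eq!(probably_unintended.len(), 1);

        // What such code usually means to say:
        let values: Vec<i32> = (0..3).collect();
        assert_eq!(values, [0, 1, 2]);
    }
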
@@ -550,13 +550,13 @@ fn append_str_to_nodes(nodes: &mut Vec<RenderedNode>, text: &str) {
 pub(crate) fn field_is_empty(text: &str) -> bool {
     lazy_static! {
         static ref RE: Regex = Regex::new(
-            r#"(?xsi)
+            r"(?xsi)
                ^(?:
                [[:space:]]
                |
                </?(?:br|div)\ ?/?>
                )*$
-            "#
+            "
         )
         .unwrap();
     }

@@ -986,7 +986,7 @@ mod test {
 
     #[test]
     fn requirements() {
-        let field_map: FieldMap = vec!["a", "b", "c"]
+        let field_map: FieldMap = ["a", "b", "c"]
             .iter()
             .enumerate()
             .map(|(a, b)| (*b, a as u16))

@@ -218,6 +218,7 @@ mod test {
         );
     }
 
+    #[allow(clippy::needless_raw_string_hashes)]
     #[test]
     fn hint() {
         assert_eq!(

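In this test the raw strings keep their # delimiters and the new lint is silenced for the whole function instead, presumably because rewriting every literal was not worth it (some may contain quotes, others are kept for consistency); that reasoning is a guess, the diff only shows the attribute. A sketch of the pattern, with an illustrative test name and literal:

    #[allow(clippy::needless_raw_string_hashes)]
    #[test]
    fn keeps_hashes() {
        // The # delimiters are not strictly required for this literal, but the
        // allow lets the test keep its existing raw-string style untouched.
        let marker = r#"[[type:Front]]"#;
        assert!(marker.starts_with("[["));
    }
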
@@ -133,16 +133,16 @@ lazy_static! {
 
     // videos are also in sound tags
     static ref AV_TAGS: Regex = Regex::new(
-        r#"(?xs)
+        r"(?xs)
            \[sound:(.+?)\] # 1 - the filename in a sound tag
            |
            \[anki:tts\]
            \[(.*?)\] # 2 - arguments to tts call
            (.*?) # 3 - field text
            \[/anki:tts\]
-            "#).unwrap();
+            ").unwrap();
 
-    static ref PERSISTENT_HTML_SPACERS: Regex = Regex::new(r#"(?i)<br\s*/?>|<div>|\n"#).unwrap();
+    static ref PERSISTENT_HTML_SPACERS: Regex = Regex::new(r"(?i)<br\s*/?>|<div>|\n").unwrap();
 
     static ref TYPE_TAG: Regex = Regex::new(r"\[\[type:[^]]+\]\]").unwrap();
     pub(crate) static ref SOUND_TAG: Regex = Regex::new(r"\[sound:([^]]+)\]").unwrap();

@@ -391,11 +391,11 @@ pub(crate) fn is_glob(txt: &str) -> bool {
     // even number of \s followed by a wildcard
     lazy_static! {
         static ref RE: Regex = Regex::new(
-            r#"(?x)
+            r"(?x)
                (?:^|[^\\]) # not a backslash
                (?:\\\\)* # even number of backslashes
                [*_] # wildcard
-            "#
+            "
         )
         .unwrap();
     }

@@ -16,7 +16,7 @@ use crate::text::strip_html;
 
 lazy_static! {
     static ref LINEBREAKS: Regex = Regex::new(
-        r#"(?six)
+        r"(?six)
            (
                \n
                |

@@ -24,7 +24,7 @@ lazy_static! {
                |
                </?div>
            )+
-        "#
+        "
     )
     .unwrap();
 }