Mirror of https://github.com/ankitects/anki.git, synced 2025-09-18 14:02:21 -04:00
Bump Rust to 1.84
+ fix new warnings/lints + update pyo3 to fix some other warnings
parent a05e41e035
commit 92b1144d97
20 changed files with 40 additions and 39 deletions
Cargo.lock (generated): 24 lines changed
@@ -4470,7 +4470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15"
 dependencies = [
  "bytes",
- "heck 0.5.0",
+ "heck 0.4.1",
  "itertools 0.13.0",
  "log",
  "multimap"

@@ -4573,9 +4573,9 @@ dependencies = [

 [[package]]
 name = "pyo3"
-version = "0.22.5"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d922163ba1f79c04bc49073ba7b32fd5a8d3b76a87c955921234b8e77333c51"
+checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc"
 dependencies = [
  "cfg-if",
  "indoc",

@@ -4591,9 +4591,9 @@ dependencies = [

 [[package]]
 name = "pyo3-build-config"
-version = "0.22.5"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc38c5feeb496c8321091edf3d63e9a6829eab4b863b4a6a65f26f3e9cc6b179"
+checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7"
 dependencies = [
  "once_cell",
  "target-lexicon",

@@ -4601,9 +4601,9 @@ dependencies = [

 [[package]]
 name = "pyo3-ffi"
-version = "0.22.5"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94845622d88ae274d2729fcefc850e63d7a3ddff5e3ce11bd88486db9f1d357d"
+checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d"
 dependencies = [
  "libc",
  "pyo3-build-config",

@@ -4611,9 +4611,9 @@ dependencies = [

 [[package]]
 name = "pyo3-macros"
-version = "0.22.5"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e655aad15e09b94ffdb3ce3d217acf652e26bbc37697ef012f5e5e348c716e5e"
+checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",

@@ -4623,9 +4623,9 @@ dependencies = [

 [[package]]
 name = "pyo3-macros-backend"
-version = "0.22.5"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae1e3f09eecd94618f60a455a23def79f79eba4dc561a97324bf9ac8c6df30ce"
+checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4"
 dependencies = [
  "heck 0.5.0",
  "proc-macro2",

@@ -5592,7 +5592,7 @@ version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917"
 dependencies = [
- "heck 0.5.0",
+ "heck 0.4.1",
  "proc-macro2",
  "quote",
  "syn 2.0.82",

@@ -110,7 +110,7 @@ prost-build = "0.13"
 prost-reflect = "0.14"
 prost-types = "0.13"
 pulldown-cmark = "0.9.6"
-pyo3 = { version = "0.22.5", features = ["extension-module", "abi3", "abi3-py39"] }
+pyo3 = { version = "0.23.4", features = ["extension-module", "abi3", "abi3-py39"] }
 rand = "0.8.5"
 regex = "1.11.0"
 reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] }

@@ -56,7 +56,7 @@ impl Backend {
         let in_bytes = input.as_bytes();
         py.allow_threads(|| self.backend.run_service_method(service, method, in_bytes))
             .map(|out_bytes| {
-                let out_obj = PyBytes::new_bound(py, &out_bytes);
+                let out_obj = PyBytes::new(py, &out_bytes);
                 out_obj.into()
             })
             .map_err(BackendError::new_err)

@@ -72,7 +72,7 @@ impl Backend {
                 .map_err(BackendError::new_err)
         });
         let out_bytes = out_res?;
-        let out_obj = PyBytes::new_bound(py, &out_bytes);
+        let out_obj = PyBytes::new(py, &out_bytes);
         Ok(out_obj.into())
     }
 }

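The PyBytes changes track the pyo3 bump: in pyo3 0.23 the 0.22-era `*_bound` constructors were renamed, so `PyBytes::new` now returns a `Bound<'py, PyBytes>` directly. A minimal sketch of the new-style call (the helper function below is hypothetical, not from this repo):

use pyo3::prelude::*;
use pyo3::types::PyBytes;

// Hypothetical helper: wrap a Rust byte buffer in a Python `bytes` object.
// Under pyo3 0.23, `PyBytes::new(py, ..)` replaces `PyBytes::new_bound(py, ..)`
// and yields a `Bound<'py, PyBytes>` tied to the GIL token `py`.
fn to_py_bytes(py: Python<'_>, data: &[u8]) -> PyObject {
    let out: Bound<'_, PyBytes> = PyBytes::new(py, data);
    out.into()
}
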
@@ -175,9 +175,9 @@ impl Backend {
         // currently limited to http1, as nginx doesn't support http2 proxies
         let mut web_client = self.web_client.lock().unwrap();

-        return web_client
+        web_client
             .get_or_insert_with(|| Client::builder().http1_only().build().unwrap())
-            .clone();
+            .clone()
     }

     fn db_command(&self, input: &[u8]) -> Result<Vec<u8>> {

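Turning `return web_client ... .clone();` into a tail expression is the usual fix for clippy's needless_return lint: the final expression of a function body is already its return value. A trivial standalone illustration:

// Before (linted): fn double(x: u32) -> u32 { return x * 2; }
fn double(x: u32) -> u32 {
    x * 2 // tail expression: no `return`, no trailing semicolon
}

fn main() {
    assert_eq!(double(21), 42);
}
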
@@ -168,6 +168,7 @@ impl crate::services::BackendSyncService for Backend {

 impl Backend {
+    /// Return a handle for regular (non-media) syncing.
     #[allow(clippy::type_complexity)]
     fn sync_abort_handle(
         &self,
     ) -> Result<(

@@ -83,9 +83,9 @@ impl<'a> Directive<'a> {
 }

 /// Consume 0 or more of anything in " \t\r\n" after `parser`.
-fn trailing_whitespace0<'parser, 's, P, O>(parser: P) -> impl FnMut(&'s str) -> IResult<O>
+fn trailing_whitespace0<'parser, 's, P, O>(parser: P) -> impl FnMut(&'s str) -> IResult<'s, O>
 where
-    P: FnMut(&'s str) -> IResult<O> + 'parser,
+    P: FnMut(&'s str) -> IResult<'s, O> + 'parser,
 {
     terminated(parser, multispace0)
 }

@@ -93,7 +93,7 @@ where
 /// Parse until char in `arr` is found. Always succeeds.
 fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
     arr: &'arr str,
-) -> impl FnMut(&'s str) -> IResult<&'s str> + 'parser {
+) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser {
     alt((is_not(arr), success("")))
 }

@@ -120,7 +120,7 @@ fn tag_node(s: &str) -> IResult<Node> {
 /// Return a parser to match an opening `name` tag and return its options.
 fn opening_parser<'name, 's: 'name>(
     name: &'name str,
-) -> impl FnMut(&'s str) -> IResult<Vec<(&str, &str)>> + 'name {
+) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
     /// List of whitespace-separated `key=val` tuples, where `val` may be
     /// empty.
     fn options(s: &str) -> IResult<Vec<(&str, &str)>> {

@@ -148,7 +148,7 @@ fn tag_node(s: &str) -> IResult<Node> {
 /// Return a parser to match a closing `name` tag.
 fn closing_parser<'parser, 'name: 'parser, 's: 'parser>(
     name: &'name str,
-) -> impl FnMut(&'s str) -> IResult<()> + 'parser {
+) -> impl FnMut(&'s str) -> IResult<'s, ()> + 'parser {
     value((), tuple((tag("[/anki:"), tag(name), tag("]"))))
 }

@@ -156,7 +156,7 @@ fn tag_node(s: &str) -> IResult<Node> {
 /// is found.
 fn content_parser<'parser, 'name: 'parser, 's: 'parser>(
     name: &'name str,
-) -> impl FnMut(&'s str) -> IResult<&str> + 'parser {
+) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser {
     recognize(many0(pair(not(closing_parser(name)), anychar)))
 }

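These parser signatures now write the input lifetime on the crate-local `IResult` alias instead of leaving it elided in the returned closure type. A rough sketch of the pattern, assuming the alias is a nom-7-style result over `&str` input (the alias definition below is an assumption, not copied from the repo):

use nom::branch::alt;
use nom::bytes::complete::is_not;
use nom::combinator::success;

// Assumed shape of the crate-local alias: a nom result over string-slice input.
type IResult<'s, O> = nom::IResult<&'s str, O>;

/// Parse until a char in `arr` is found; always succeeds (mirrors `is_not0` above).
fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
    arr: &'arr str,
) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser {
    // `IResult<'s, &'s str>` names the borrow of the input explicitly instead of
    // relying on lifetime elision in the alias path.
    alt((is_not(arr), success("")))
}

fn main() {
    let mut parser = is_not0(":");
    assert_eq!(parser("ab:cd"), Ok((":cd", "ab")));
}
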
@@ -12,7 +12,7 @@ use crate::prelude::*;
 use crate::text::decode_entities;
 use crate::text::strip_html_for_tts;

-impl<'a> CardNodes<'a> {
+impl CardNodes<'_> {
     pub(super) fn write_without_av_tags(&self) -> String {
         AvStripper::new().write(self)
     }

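The `impl<'a> CardNodes<'a>` style blocks here and further down become `impl CardNodes<'_>`; newer clippy flags impl-block lifetimes that are only threaded through and can be anonymous. A hedged illustration with a made-up type:

// Illustrative only: `Wrapper` stands in for types like CardNodes or StateContext.
struct Wrapper<'a> {
    text: &'a str,
}

// Previously written as `impl<'a> Wrapper<'a> { .. }`; since the lifetime is not
// otherwise named inside the block, it can be left anonymous.
impl Wrapper<'_> {
    fn byte_len(&self) -> usize {
        self.text.len()
    }
}

fn main() {
    let w = Wrapper { text: "anki" };
    assert_eq!(w.byte_len(), 4);
}
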
@@ -108,11 +108,11 @@ impl SafeMediaEntry {
         get_checksum: &mut impl FnMut(&str) -> Result<Option<Sha1Hash>>,
     ) -> Result<bool> {
         get_checksum(&self.name)
-            .map(|opt| opt.map_or(false, |sha1| sha1 == self.sha1.expect("sha1 not set")))
+            .map(|opt| opt.is_some_and(|sha1| sha1 == self.sha1.expect("sha1 not set")))
     }

     pub(super) fn has_size_equal_to(&self, other_path: &Path) -> bool {
-        fs::metadata(other_path).map_or(false, |metadata| metadata.len() == self.size as u64)
+        fs::metadata(other_path).is_ok_and(|metadata| metadata.len() == self.size as u64)
     }

     /// Copy the archived file to the target folder, setting its hash if

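Most of the remaining churn swaps `map_or(false, f)` on `Option`/`Result` for `is_some_and`/`is_ok_and`, the form newer clippy suggests. A standalone sketch of the rewrite:

fn main() {
    let maybe_len: Option<usize> = Some(12);
    let parsed: Result<u32, std::num::ParseIntError> = "42".parse();

    // Old style, now linted: map_or(false, predicate)
    let long_enough_old = maybe_len.map_or(false, |n| n > 8);
    // Preferred: is_some_and / is_ok_and read as "is Some/Ok and the predicate holds"
    let long_enough_new = maybe_len.is_some_and(|n| n > 8);
    let is_answer = parsed.is_ok_and(|n| n == 42);

    assert_eq!(long_enough_old, long_enough_new);
    assert!(is_answer);
}
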
@@ -211,7 +211,7 @@ fn remove_tags_line_from_reader(reader: &mut (impl Read + Seek)) -> Result<()> {
     let mut first_line = String::new();
     buf_reader.read_line(&mut first_line)?;
     let offset = if strip_utf8_bom(&first_line).starts_with("tags:") {
-        first_line.as_bytes().len()
+        first_line.len()
     } else {
         0
     };

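Dropping `.as_bytes()` before `.len()` works because `str::len` already reports the UTF-8 byte length (not the character count); newer clippy warns about the redundant conversion. For example:

fn main() {
    let s = "héllo";
    assert_eq!(s.len(), s.as_bytes().len()); // both 6: "é" is two bytes in UTF-8
    assert_eq!(s.chars().count(), 5);        // character count is a different measure
}
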
@@ -492,7 +492,7 @@ impl NoteContext<'_> {
     fn is_guid_dupe(&self) -> bool {
         self.dupes
             .first()
-            .map_or(false, |d| d.note.guid == self.note.guid)
+            .is_some_and(|d| d.note.guid == self.note.guid)
     }

     fn has_first_field(&self) -> bool {

@@ -66,7 +66,7 @@ fn references_media_field(format: &str) -> bool {
 fn captures_contain_field_replacement(caps: Captures) -> bool {
     caps.iter()
         .skip(1)
-        .any(|opt| opt.map_or(false, match_contains_field_replacement))
+        .any(|opt| opt.is_some_and(match_contains_field_replacement))
 }

 fn match_contains_field_replacement(m: Match) -> bool {

@@ -699,7 +699,7 @@ fn missing_cloze_filter(
 fn has_cloze(template: &Option<ParsedTemplate>) -> bool {
     template
         .as_ref()
-        .map_or(false, |t| !t.all_referenced_cloze_field_names().is_empty())
+        .is_some_and(|t| !t.all_referenced_cloze_field_names().is_empty())
 }

 impl From<Notetype> for NotetypeProto {

@@ -31,7 +31,7 @@ static FUZZ_RANGES: [FuzzRange; 3] = [
     },
 ];

-impl<'a> StateContext<'a> {
+impl StateContext<'_> {
     /// Apply fuzz, respecting the passed bounds.
     pub(crate) fn with_review_fuzz(&self, interval: f32, minimum: u32, maximum: u32) -> u32 {
         self.load_balancer

@@ -90,7 +90,7 @@ pub struct LoadBalancerContext<'a> {
     fuzz_seed: Option<u64>,
 }

-impl<'a> LoadBalancerContext<'a> {
+impl LoadBalancerContext<'_> {
     pub fn find_interval(&self, interval: f32, minimum: u32, maximum: u32) -> Option<u32> {
         self.load_balancer.find_interval(
             interval,

@@ -114,7 +114,7 @@ pub(crate) struct StateContext<'a> {
     pub preview_delays: PreviewDelays,
 }

-impl<'a> StateContext<'a> {
+impl StateContext<'_> {
     /// Return the minimum and maximum review intervals.
     /// - `maximum` is `self.maximum_review_interval`, but at least 1.
     /// - `minimum` is as passed, but at least 1, and at most `maximum`.

@@ -13,7 +13,7 @@ fn to_secs(v: f32) -> u32 {
     (v * 60.0) as u32
 }

-impl<'a> LearningSteps<'a> {
+impl LearningSteps<'_> {
     /// Takes `steps` as minutes.
     pub(crate) fn new(steps: &[f32]) -> LearningSteps<'_> {
         LearningSteps { steps }

@@ -121,7 +121,7 @@ fn validate_custom_data(json_str: &str) -> Result<()> {
     let object: HashMap<&str, Value> =
         serde_json::from_str(json_str).or_invalid("custom data not an object")?;
     require!(
-        object.keys().all(|k| k.as_bytes().len() <= 8),
+        object.keys().all(|k| k.len() <= 8),
         "custom data keys must be <= 8 bytes"
     );
     require!(

@@ -512,7 +512,7 @@ fn render_into(
     Ok(())
 }

-impl<'a> RenderContext<'a> {
+impl RenderContext<'_> {
     fn evaluate_conditional(&self, key: &str, negated: bool) -> TemplateResult<bool> {
         if self.nonempty_fields.contains(key) {
             Ok(true ^ negated)

@@ -904,7 +904,7 @@ fn find_field_references<'a>(

 fn is_cloze_conditional(key: &str) -> bool {
     key.strip_prefix('c')
-        .map_or(false, |s| s.parse::<u32>().is_ok())
+        .is_some_and(|s| s.parse::<u32>().is_ok())
 }

 // Tests

@@ -140,7 +140,7 @@ fn isolate_leading_mark(text: &str) -> Cow<str> {
     if text
         .chars()
         .next()
-        .map_or(false, |c| GeneralCategory::of(c).is_mark())
+        .is_some_and(|c| GeneralCategory::of(c).is_mark())
     {
         Cow::Owned(format!("\u{a0}{text}"))
     } else {

@@ -1,3 +1,3 @@
 [toolchain]
 # older versions may fail to compile; newer versions may fail the clippy tests
-channel = "1.82.0"
+channel = "1.84.0"