Rust dep updates

- Rust 1.87 for now (1.88 due out in around a week)
- Nom looks involved, so I left it for now
- prost-reflect depends on a new prost version that got yanked
This commit is contained in:
Damien Elmes 2025-06-17 14:36:53 +07:00
parent b86ffe5d7d
commit 033285b936
25 changed files with 1115 additions and 1194 deletions

View file

@@ -14,6 +14,7 @@ allow = [
"MIT",
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"CDLA-Permissive-2.0",
"ISC",
"MPL-2.0",
"BSD-2-Clause",

1464
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -33,7 +33,7 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
[workspace.dependencies.fsrs]
version = "4.0.0"
version = "4.1.1"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"
@@ -52,103 +52,98 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes
# normal
ammonia = "4.0.0"
anyhow = "1.0.90"
apple-bundles = "0.17.0"
async-compression = { version = "0.4.17", features = ["zstd", "tokio"] }
ammonia = "4.1.0"
anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6"
async-trait = "0.1.83"
axum = { version = "0.7", features = ["multipart", "macros"] }
axum-client-ip = "0.6"
axum-extra = { version = "0.9.4", features = ["typed-header"] }
blake3 = "1.5.4"
bytes = "1.7.2"
camino = "1.1.9"
chrono = { version = "0.4.38", default-features = false, features = ["std", "clock"] }
clap = { version = "4.5.20", features = ["derive"] }
coarsetime = "0.1.34"
convert_case = "0.6.0"
criterion = { version = "0.5.1" }
csv = "1.3.0"
data-encoding = "2.6.0"
async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] }
blake3 = "1.8.2"
bytes = "1.10.1"
camino = "1.1.10"
chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] }
clap = { version = "4.5.40", features = ["derive"] }
coarsetime = "0.1.36"
convert_case = "0.8.0"
criterion = { version = "0.6.0" }
csv = "1.3.1"
data-encoding = "2.9.0"
difflib = "0.4.0"
dirs = "5.0.1"
dirs = "6.0.0"
dunce = "1.0.5"
embed-resource = "2.4"
embed-resource = "3.0.4"
envy = "0.4.2"
flate2 = "1.0.34"
fluent = "0.16.1"
fluent-bundle = "0.15.3"
fluent-syntax = "0.11.1"
flate2 = "1.1.2"
fluent = "0.17.0"
fluent-bundle = "0.16.0"
fluent-syntax = "0.12.0"
fnv = "1.0.7"
futures = "0.3.31"
glob = "0.3.1"
globset = "0.4.15"
globset = "0.4.16"
hex = "0.4.3"
htmlescape = "0.3.1"
hyper = "1"
id_tree = "1.8.0"
inflections = "1.1.1"
intl-memoizer = "0.5.2"
itertools = "0.13.0"
intl-memoizer = "0.5.3"
itertools = "0.14.0"
junction = "1.2.0"
lazy_static = "1.5.0"
libc = "0.2"
libc-stdhandle = "0.1"
maplit = "1.0.2"
nom = "7.1.3"
num-format = "0.4.4"
num_cpus = "1.16.0"
num_cpus = "1.17.0"
num_enum = "0.7.3"
once_cell = "1.20.2"
once_cell = "1.21.3"
pbkdf2 = { version = "0.12", features = ["simple"] }
phf = { version = "0.11.2", features = ["macros"] }
pin-project = "1.1.6"
plist = "1.7.0"
prettyplease = "0.2.24"
phf = { version = "0.11.3", features = ["macros"] }
pin-project = "1.1.10"
prettyplease = "0.2.34"
prost = "0.13"
prost-build = "0.13"
prost-reflect = "0.14"
prost-reflect = "0.14.7"
prost-types = "0.13"
pulldown-cmark = "0.9.6"
pyo3 = { version = "0.24", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.8.5"
regex = "1.11.0"
reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] }
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
rustls-pemfile = "2.2.0"
scopeguard = "1.2.0"
serde = { version = "1.0.210", features = ["derive"] }
serde-aux = "4.5.0"
serde_json = "1.0.132"
serde_repr = "0.1.19"
serde_tuple = "0.5.0"
serde = { version = "1.0.219", features = ["derive"] }
serde-aux = "4.7.0"
serde_json = "1.0.140"
serde_repr = "0.1.20"
serde_tuple = "1.1.0"
sha1 = "0.10.6"
sha2 = { version = "0.10.8" }
simple-file-manifest = "0.11.0"
sha2 = { version = "0.10.9" }
snafu = { version = "0.8.6", features = ["rust_1_61"] }
strum = { version = "0.26.3", features = ["derive"] }
syn = { version = "2.0.82", features = ["parsing", "printing"] }
tar = "0.4.42"
tempfile = "3.13.0"
strum = { version = "0.27.1", features = ["derive"] }
syn = { version = "2.0.103", features = ["parsing", "printing"] }
tar = "0.4.44"
tempfile = "3.20.0"
termcolor = "1.4.1"
tokio = { version = "1.40", features = ["fs", "rt-multi-thread", "macros", "signal"] }
tokio-util = { version = "0.7.12", features = ["io"] }
tower-http = { version = "0.5", features = ["trace"] }
tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] }
tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
tugger-windows-codesign = "0.10.0"
unic-langid = { version = "0.9.5", features = ["macros"] }
tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
walkdir = "2.5.0"
which = "5.0.0"
which = "8.0.0"
winapi = { version = "0.3", features = ["wincon"] }
wiremock = "0.6.2"
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Foundation_Collections", "Storage_Streams"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] }
zstd = { version = "0.13.2", features = ["zstdmt"] }
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
zstd = { version = "0.13.3", features = ["zstdmt"] }
# Apply mild optimizations to our dependencies in dev mode, which among other things
# improves sha2 performance by about 21x. Opt 1 chosen due to

View file

@@ -1,7 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::io::ErrorKind;
use std::process::Command;
use anki_io::create_dir_all;
@@ -44,7 +43,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
if let Some((k, v)) = s.split_once('=') {
Ok((k.into(), v.into()))
} else {
Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
Err(std::io::Error::other("invalid env var"))
}
}

File diff suppressed because it is too large Load diff

View file

@@ -106,6 +106,5 @@ unicode-normalization.workspace = true
zip.workspace = true
zstd.workspace = true
[target.'cfg(windows)'.dependencies.windows]
version = "0.56.0"
features = ["Media_SpeechSynthesis", "Foundation_Collections", "Storage_Streams"]
[target.'cfg(windows)'.dependencies]
windows.workspace = true

View file

@@ -52,7 +52,7 @@ impl CheckableUrl {
fn anchor(&self) -> Cow<str> {
match *self {
Self::HelpPage(page) => help_page_link_suffix(page).into(),
Self::String(s) => s.split('#').last().unwrap_or_default().into(),
Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
}
}
}

View file

@@ -146,8 +146,12 @@ pub(crate) fn export_collection(
Ok(())
}
fn file_options_stored() -> FileOptions {
FileOptions::default().compression_method(CompressionMethod::Stored)
fn file_options_stored() -> FileOptions<'static, ()> {
FileOptions::<'static, ()>::default().compression_method(CompressionMethod::Stored)
}
fn file_options_default() -> FileOptions<'static, ()> {
FileOptions::<'static, ()>::default()
}
fn write_collection(
@@ -160,7 +164,7 @@ fn write_collection(
zip.start_file(meta.collection_filename(), file_options_stored())?;
zstd_copy(col, zip, size)?;
} else {
zip.start_file(meta.collection_filename(), FileOptions::default())?;
zip.start_file(meta.collection_filename(), file_options_default())?;
io::copy(col, zip)?;
}
Ok(())

View file

@@ -124,7 +124,7 @@ fn maybe_restore_media_file(
Ok(())
}
fn restore_media_file(meta: &Meta, zip_file: &mut ZipFile, path: &Path) -> Result<()> {
fn restore_media_file(meta: &Meta, zip_file: &mut ZipFile<File>, path: &Path) -> Result<()> {
let mut tempfile = new_tempfile_in_parent_of(path)?;
meta.copy(zip_file, &mut tempfile)
.with_context(|_| FileIoSnafu {

View file

@@ -96,7 +96,10 @@ impl SafeMediaEntry {
media_folder.join(&self.name)
}
pub(super) fn fetch_file<'a>(&self, archive: &'a mut ZipArchive<File>) -> Result<ZipFile<'a>> {
pub(super) fn fetch_file<'a>(
&self,
archive: &'a mut ZipArchive<File>,
) -> Result<ZipFile<'a, File>> {
match archive.by_name(&self.index.to_string()) {
Ok(file) => Ok(file),
Err(err) => invalid_input!(err, "{} missing from archive", self.index),

View file

@@ -352,7 +352,7 @@ impl Collection {
fn random_position(highest_position: u32) -> u32 {
let mut rng = StdRng::seed_from_u64(highest_position as u64);
rng.gen_range(1..highest_position.max(1000))
rng.random_range(1..highest_position.max(1000))
}
#[cfg(test)]

View file

@@ -87,7 +87,7 @@ impl CardStateUpdater {
if secs >= upper_exclusive {
secs
} else {
rng.gen_range(secs..upper_exclusive)
rng.random_range(secs..upper_exclusive)
}
} else {
secs

View file

@@ -630,7 +630,7 @@ fn get_fuzz_seed_for_id_and_reps(card_id: CardId, card_reps: u32) -> Option<u64>
/// Return a fuzz factor from the range `0.0..1.0`, using the provided seed.
/// None if seed is None.
fn get_fuzz_factor(seed: Option<u64>) -> Option<f32> {
seed.map(|s| StdRng::seed_from_u64(s).gen_range(0.0..1.0))
seed.map(|s| StdRng::seed_from_u64(s).random_range(0.0..1.0))
}
#[cfg(test)]

View file

@@ -68,7 +68,7 @@ pub(crate) fn apply_load_balance_and_easy_days(
sibling_modifier: 1.0,
easy_days_modifier: easy_days_modifier[interval_index],
});
let fuzz_seed = rng.gen();
let fuzz_seed = rng.random();
select_weighted_interval(intervals, Some(fuzz_seed)).unwrap() as f32
}
@@ -106,7 +106,7 @@ fn create_review_priority_fn(
// Random ordering
Random => {
wrap!(move |_c, _w| rand::thread_rng().gen_range(0..deck_size) as i32)
wrap!(move |_c, _w| rand::rng().random_range(0..deck_size) as i32)
}
// Not implemented yet

View file

@@ -127,7 +127,7 @@ fn nids_in_desired_order(cards: &[Card], order: NewCardDueOrder) -> Vec<NoteId>
nids.sort_unstable();
}
NewCardDueOrder::Random => {
nids.shuffle(&mut rand::thread_rng());
nids.shuffle(&mut rand::rng());
}
NewCardDueOrder::Preserve => unreachable!(),
}

View file

@@ -4,8 +4,8 @@
use std::collections::HashMap;
use std::sync::LazyLock;
use rand::distributions::Distribution;
use rand::distributions::Uniform;
use rand::distr::Distribution;
use rand::distr::Uniform;
use regex::Regex;
use super::answering::CardAnswer;
@@ -114,8 +114,8 @@ impl Collection {
let spec = parse_due_date_str(days)?;
let usn = self.usn()?;
let today = self.timing_today()?.days_elapsed;
let mut rng = rand::thread_rng();
let distribution = Uniform::from(spec.min..=spec.max);
let mut rng = rand::rng();
let distribution = Uniform::new_inclusive(spec.min, spec.max).unwrap();
let mut decks_initial_ease: HashMap<DeckId, f32> = HashMap::new();
self.transact(Op::SetDueDate, |col| {
for mut card in col.all_cards_for_ids(cids, false)? {

View file

@@ -5,8 +5,8 @@ use std::collections::HashMap;
use std::collections::HashSet;
use chrono::Datelike;
use rand::distributions::Distribution;
use rand::distributions::WeightedIndex;
use rand::distr::weighted::WeightedIndex;
use rand::distr::Distribution;
use rand::rngs::StdRng;
use rand::SeedableRng;

View file

@@ -15,6 +15,7 @@ use fsrs::FSRS5_DEFAULT_DECAY;
use regex::Regex;
use rusqlite::functions::FunctionFlags;
use rusqlite::params;
use rusqlite::trace::TraceEvent;
use rusqlite::Connection;
use serde_json::Value;
use unicase::UniCase;
@@ -47,10 +48,13 @@ pub struct SqliteStorage {
}
fn open_or_create_collection_db(path: &Path) -> Result<Connection> {
let mut db = Connection::open(path)?;
let db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
db.trace_v2(
rusqlite::trace::TraceEventCodes::SQLITE_TRACE_STMT,
Some(trace),
);
}
db.busy_timeout(std::time::Duration::from_secs(0))?;
@@ -415,8 +419,10 @@ fn schema_version(db: &Connection) -> Result<(bool, u8)> {
))
}
fn trace(s: &str) {
println!("sql: {}", s.trim().replace('\n', " "));
fn trace(event: TraceEvent) {
if let TraceEvent::Stmt(_, sql) = event {
println!("sql: {}", sql.trim().replace('\n', " "));
}
}
impl SqliteStorage {

View file

@@ -22,7 +22,7 @@ use anki_io::create_dir_all;
use axum::extract::DefaultBodyLimit;
use axum::routing::get;
use axum::Router;
use axum_client_ip::SecureClientIpSource;
use axum_client_ip::ClientIpSource;
use pbkdf2::password_hash::PasswordHash;
use pbkdf2::password_hash::PasswordHasher;
use pbkdf2::password_hash::PasswordVerifier;
@@ -69,7 +69,7 @@ pub struct SyncServerConfig {
#[serde(default = "default_base", rename = "base")]
pub base_folder: PathBuf,
#[serde(default = "default_ip_header")]
pub ip_header: SecureClientIpSource,
pub ip_header: ClientIpSource,
}
fn default_host() -> IpAddr {
@@ -86,8 +86,8 @@ fn default_base() -> PathBuf {
.join(".syncserver")
}
pub fn default_ip_header() -> SecureClientIpSource {
SecureClientIpSource::ConnectInfo
pub fn default_ip_header() -> ClientIpSource {
ClientIpSource::ConnectInfo
}
impl SimpleServerInner {

View file

@@ -53,7 +53,7 @@ async fn sync_handler<P: SyncProtocol>(
}
pub fn collection_sync_router<P: SyncProtocol + Clone>() -> Router<P> {
Router::new().route("/:method", post(sync_handler::<P>))
Router::new().route("/{method}", post(sync_handler::<P>))
}
/// The Rust code used to send a GET with query params, which was inconsistent
@@ -112,5 +112,5 @@ pub fn media_sync_router<P: MediaSyncProtocol + Clone>() -> Router<P> {
"/begin",
get(media_begin_get::<P>).post(media_begin_post::<P>),
)
.route("/:method", post(media_sync_handler::<P>))
.route("/{method}", post(media_sync_handler::<P>))
}

View file

@@ -283,15 +283,20 @@ fn row_to_name_and_checksum(row: &Row) -> error::Result<(String, Sha1Hash)> {
Ok((file_name, sha1))
}
fn trace(s: &str) {
println!("sql: {}", s)
fn trace(event: rusqlite::trace::TraceEvent) {
if let rusqlite::trace::TraceEvent::Stmt(_, sql) = event {
println!("sql: {}", sql);
}
}
pub(crate) fn open_or_create<P: AsRef<Path>>(path: P) -> error::Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
db.trace_v2(
rusqlite::trace::TraceEventCodes::SQLITE_TRACE_STMT,
Some(trace),
);
}
db.pragma_update(None, "page_size", 4096)?;

View file

@@ -27,7 +27,8 @@ pub struct ZipFileMetadata {
/// The metadata is in a different format to the upload case, since deletions
/// don't need to be represented.
pub fn zip_files_for_download(files: Vec<(String, Vec<u8>)>) -> Result<Vec<u8>> {
let options = FileOptions::default().compression_method(zip::CompressionMethod::Stored);
let options: FileOptions<'_, ()> =
FileOptions::default().compression_method(zip::CompressionMethod::Stored);
let mut zip = ZipWriter::new(io::Cursor::new(vec![]));
let mut entries = HashMap::new();
@@ -47,7 +48,8 @@ pub fn zip_files_for_download(files: Vec<(String, Vec<u8>)>) -> Result<Vec<u8>>
}
pub fn zip_files_for_upload(entries_: Vec<(String, Option<Vec<u8>>)>) -> Result<Vec<u8>> {
let options = FileOptions::default().compression_method(zip::CompressionMethod::Stored);
let options: FileOptions<'_, ()> =
FileOptions::default().compression_method(zip::CompressionMethod::Stored);
let mut zip = ZipWriter::new(io::Cursor::new(vec![]));
let mut entries = vec![];

View file

@@ -10,14 +10,13 @@ use std::marker::PhantomData;
use std::net::IpAddr;
use std::sync::LazyLock;
use async_trait::async_trait;
use axum::body::Body;
use axum::extract::FromRequest;
use axum::extract::Multipart;
use axum::http::Request;
use axum::http::StatusCode;
use axum::RequestPartsExt;
use axum_client_ip::SecureClientIp;
use axum_client_ip::ClientIp;
use axum_extra::TypedHeader;
use header_and_stream::SyncHeader;
use serde::de::DeserializeOwned;
@@ -101,19 +100,18 @@
}
}
#[async_trait]
impl<S, T> FromRequest<S, Body> for SyncRequest<T>
impl<S, T> FromRequest<S> for SyncRequest<T>
where
S: Send + Sync,
T: DeserializeOwned,
{
type Rejection = HttpError;
async fn from_request(req: Request<Body>, state: &S) -> HttpResult<Self, Self::Rejection> {
async fn from_request(req: Request<Body>, state: &S) -> Result<Self, Self::Rejection> {
let (mut parts, body) = req.into_parts();
let ip = parts
.extract::<SecureClientIp>()
.extract::<ClientIp>()
.await
.map_err(|_| {
HttpError::new_without_source(StatusCode::INTERNAL_SERVER_ERROR, "missing ip")

View file

@@ -1,3 +1,3 @@
[toolchain]
# older versions may fail to compile; newer versions may fail the clippy tests
channel = "1.85.0"
channel = "1.87.0"

View file

@@ -206,7 +206,7 @@ fn sveltekit_temp_file(path: &str) -> bool {
}
fn check_cargo_deny() -> Result<()> {
Command::run("cargo install cargo-deny@0.18.2")?;
Command::run("cargo install cargo-deny@0.18.3")?;
Command::run("cargo deny check")?;
Ok(())
}