Merge branch 'main' into ruff

This commit is contained in:
Damien Elmes 2025-06-29 13:54:55 +07:00 committed by GitHub
commit 013a46ad63
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
79 changed files with 786 additions and 281 deletions

View file

@ -138,7 +138,7 @@ unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
walkdir = "2.5.0"
which = "8.0.0"
winapi = { version = "0.3", features = ["wincon"] }
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] }
wiremock = "0.6.3"
xz2 = "0.1.7"

View file

@ -48,7 +48,7 @@ fn normalize_version(version: &str) -> String {
part.to_string()
} else {
let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
format!("{}{}", normalized_prefix, rest)
format!("{normalized_prefix}{rest}")
}
}
})

View file

@ -72,12 +72,11 @@ fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
"MacArm" => continue, // Skip MacArm since it's handled with MacX64
"WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
"WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
_ => &format!("Platform::{}", platform),
_ => &format!("Platform::{platform}"),
};
match_blocks.push(format!(
" {} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}",
match_pattern, download_url, sha256
" {match_pattern} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}"
));
}

View file

@ -53,7 +53,7 @@ fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
// Find the corresponding .sha256 or .sha256sum asset
let sha_asset = assets.iter().find(|a| {
let name = a["name"].as_str().unwrap_or("");
name == format!("{}.sha256", asset_name) || name == format!("{}.sha256sum", asset_name)
name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
});
if sha_asset.is_none() {
eprintln!("No sha256 asset found for {asset_name}");
@ -71,8 +71,7 @@ fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
let sha256 = sha_text.split_whitespace().next().unwrap_or("");
match_blocks.push(format!(
" Platform::{} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}",
platform, download_url, sha256
" Platform::{platform} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}"
));
}
@ -135,10 +134,7 @@ mod tests {
assert_eq!(
updated_lines,
original_lines - EXPECTED_LINES_REMOVED,
"Expected line count to decrease by exactly {} lines (original: {}, updated: {})",
EXPECTED_LINES_REMOVED,
original_lines,
updated_lines
"Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
);
}
}

View file

@ -300,7 +300,7 @@ impl BuildStatement<'_> {
writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
for (key, value) in self.variables.iter().sorted() {
writeln!(buf, " {key} = {}", value).unwrap();
writeln!(buf, " {key} = {value}").unwrap();
}
writeln!(buf).unwrap();
@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> {
let outputs = outputs.into_iter().map(|v| {
let v = v.as_ref();
let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
format!("$builddir/{}", v)
format!("$builddir/{v}")
} else {
v.to_owned()
};

View file

@ -148,7 +148,7 @@ impl BuildAction for PythonEnvironment {
// Add --python flag to extra_args if PYTHON_BINARY is set
let mut args = self.extra_args.to_string();
if let Ok(python_binary) = env::var("PYTHON_BINARY") {
args = format!("--python {} {}", python_binary, args);
args = format!("--python {python_binary} {args}");
}
build.add_variable("extra_args", args);
}

View file

@ -30,12 +30,12 @@ impl Build {
)
.unwrap();
for (key, value) in &self.variables {
writeln!(&mut buf, "{} = {}", key, value).unwrap();
writeln!(&mut buf, "{key} = {value}").unwrap();
}
buf.push('\n');
for (key, value) in &self.pools {
writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap();
writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap();
}
buf.push('\n');

View file

@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String {
let mut digest = sha2::Sha256::new();
digest.update(data);
let result = digest.finalize();
format!("{:x}", result)
format!("{result:x}")
}
enum CompressionKind {

View file

@ -138,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf {
true
};
if create {
println!("Switching build root to {}", new_target);
println!("Switching build root to {new_target}");
std::os::unix::fs::symlink(new_target, build_root).unwrap();
}
}

View file

@ -83,7 +83,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {
pub fn run_command(command: &mut Command) {
if let Err(err) = command.ensure_success() {
println!("{}", err);
println!("{err}");
std::process::exit(1);
}
}

View file

@ -435,7 +435,7 @@ impl TextWriter {
item = item.trim_start_matches(' ');
}
write!(self.buffer, "{}", item)
write!(self.buffer, "{item}")
}
fn write_char_into_indent(&mut self, ch: char) {

View file

@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option<Utf8PathBuf> {
fn all_langs(lang_folder: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
std::fs::read_dir(lang_folder)
.with_context(|| format!("reading {:?}", lang_folder))?
.with_context(|| format!("reading {lang_folder:?}"))?
.filter_map(Result::ok)
.map(|e| Ok(e.path().utf8()?))
.collect()

View file

@ -28,6 +28,6 @@ fn main() {
.to_string();
let libs_path = stdlib_path + "s";
println!("cargo:rustc-link-search={}", libs_path);
println!("cargo:rustc-link-search={libs_path}");
}
}

View file

@ -36,7 +36,7 @@ from anki.hooks import runFilter
from anki.httpclient import HttpClient
from anki.models import NotetypeDict, NotetypeId, StockNotetype
from anki.notes import Note, NoteFieldsCheckResult, NoteId
from anki.utils import checksum, is_lin, is_mac, is_win, namedtmp
from anki.utils import checksum, is_lin, is_win, namedtmp
from aqt import AnkiQt, colors, gui_hooks
from aqt.operations import QueryOp
from aqt.operations.note import update_note
@ -1734,10 +1734,9 @@ class EditorWebView(AnkiWebView):
assert a is not None
qconnect(a.triggered, lambda: openFolder(path))
if is_win or is_mac:
a = menu.addAction(tr.editing_show_in_folder())
assert a is not None
qconnect(a.triggered, lambda: show_in_folder(path))
a = menu.addAction(tr.editing_show_in_folder())
assert a is not None
qconnect(a.triggered, lambda: show_in_folder(path))
def _clipboard(self) -> QClipboard:
clipboard = self.editor.mw.app.clipboard()

View file

@ -936,9 +936,34 @@ def show_in_folder(path: str) -> None:
"""
call(osascript_to_args(script))
else:
# Just open the file in any other platform
with no_bundled_libs():
QDesktopServices.openUrl(QUrl.fromLocalFile(path))
# For linux, there are multiple file managers. Let's test if one of the
# most common file managers is found and use it in case it is installed.
# If none of this list are installed, use a fallback. The fallback
# might open the image in a web browser, image viewer or others,
# depending on the users defaults.
file_managers = [
"nautilus", # GNOME
"dolphin", # KDE
"pcmanfm", # LXDE
"thunar", # XFCE
"nemo", # Cinnamon
"caja", # MATE
]
available_file_manager = None
# Test if a file manager is installed and use it, fallback otherwise
for file_manager in file_managers:
if shutil.which(file_manager):
available_file_manager = file_manager
break
if available_file_manager:
subprocess.run([available_file_manager, path], check=False)
else:
# Just open the file in any other platform
with no_bundled_libs():
QDesktopServices.openUrl(QUrl.fromLocalFile(path))
def _show_in_folder_win32(path: str) -> None:

View file

@ -0,0 +1,135 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
import contextlib
import os
import subprocess
import sys
from pathlib import Path
import aqt.sound
from anki.utils import pointVersion
from aqt import mw
from aqt.qt import QAction
from aqt.utils import askUser, is_mac, is_win, showInfo
def _anki_launcher_path() -> str | None:
return os.getenv("ANKI_LAUNCHER")
def have_launcher() -> bool:
    """True when Anki was started by the external launcher."""
    path = _anki_launcher_path()
    return path is not None
def update_and_restart() -> None:
    """Spawn the launcher as a fully detached process, then quit Anki."""
    from aqt import mw

    launcher = _anki_launcher_path()
    assert launcher

    # Ensure the launcher performs an update check on its next run.
    _trigger_launcher_run()

    with contextlib.suppress(ResourceWarning):
        child_env = os.environ.copy()
        if sys.platform == "win32":
            flags = subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
        else:
            flags = 0
        # Detach stdio and the session/process group so the child outlives us.
        subprocess.Popen(
            [launcher],
            start_new_session=True,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            env=child_env,
            creationflags=flags,
        )

    mw.app.quit()
def _trigger_launcher_run() -> None:
    """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
    try:
        # Resolve the platform's local data dir (mirrors Rust's dirs::data_local_dir()).
        if is_win:
            from aqt.winpaths import get_local_appdata

            base = Path(get_local_appdata())
        elif is_mac:
            base = Path.home() / "Library" / "Application Support"
        else:
            # Linux: honour XDG_DATA_HOME, defaulting to ~/.local/share
            base = Path(
                os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share")
            )

        pyproject = base / "AnkiProgramFiles" / "pyproject.toml"
        if pyproject.exists():
            # Updating the mtime is the launcher's signal to re-sync.
            pyproject.touch()
    except Exception as exc:
        # Best-effort: failing to touch the file must not break the caller.
        print(exc)
def confirm_then_upgrade():
    """Ask the user before switching versions; restart only on confirmation."""
    wants_change = askUser("Change to a different Anki version?")
    if wants_change:
        update_and_restart()
# return modified command array that points to bundled command, and return
# required environment
def _packagedCmd(cmd: list[str]) -> tuple[list[str], dict[str, str]]:
    """Resolve cmd[0] to a bundled binary where one is available.

    Returns a copy of `cmd` (with cmd[0] possibly replaced by an absolute
    path) plus the environment the command should run with.

    Note: the original annotated the return as tuple[Any, ...] but `Any`
    was never imported; the concrete type is used instead.
    """
    cmd = cmd[:]
    env = os.environ.copy()
    # keep LD_LIBRARY_PATH when in snap environment
    if "LD_LIBRARY_PATH" in env and "SNAP" not in env:
        del env["LD_LIBRARY_PATH"]

    # Try to find binary in anki-audio package for Windows/Mac
    if is_win or is_mac:
        try:
            import anki_audio

            audio_pkg_path = Path(anki_audio.__file__).parent
            if is_win:
                packaged_path = audio_pkg_path / (cmd[0] + ".exe")
            else:  # is_mac
                packaged_path = audio_pkg_path / cmd[0]

            if packaged_path.exists():
                cmd[0] = str(packaged_path)
                return cmd, env
        except ImportError:
            # anki-audio not available, fall back to old behavior
            pass

    # Fallback: look for the binary alongside the Python installation.
    packaged_path = Path(sys.prefix) / cmd[0]
    if packaged_path.exists():
        cmd[0] = str(packaged_path)

    return cmd, env
def setup():
    """Install launcher integration for pre-25.06 Anki versions."""
    # 25.06+ ships its own launcher integration; also skip when not
    # started via the launcher at all.
    if pointVersion() >= 250600 or not have_launcher():
        return

    # Add action to tools menu
    action = QAction("Upgrade/Downgrade", mw)
    action.triggered.connect(confirm_then_upgrade)
    mw.form.menuTools.addAction(action)

    # Monkey-patch audio tools to use anki-audio
    if is_win or is_mac:
        aqt.sound._packagedCmd = _packagedCmd


setup()

View file

@ -0,0 +1,6 @@
{
"name": "Anki Launcher",
"package": "anki-launcher",
"min_point_version": 50,
"max_point_version": 250600
}

View file

@ -12,7 +12,7 @@ use anyhow::Result;
fn main() {
if let Err(e) = run() {
eprintln!("Error: {:#}", e);
eprintln!("Error: {e:#}");
std::process::exit(1);
}
}

View file

@ -221,7 +221,7 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> {
// Convert to Windows-style backslashes for NSIS
let windows_path = relative_path.display().to_string().replace('/', "\\");
// Use Windows line endings (\r\n) as expected by NSIS
manifest_content.push_str(&format!("{}\r\n", windows_path));
manifest_content.push_str(&format!("{windows_path}\r\n"));
}
}
}

View file

@ -17,15 +17,15 @@ use anki_io::read_file;
use anki_io::remove_file;
use anki_io::write_file;
use anki_io::ToUtf8Path;
use anki_process::CommandExt;
use anki_process::CommandExt as AnkiCommandExt;
use anyhow::Context;
use anyhow::Result;
use crate::platform::ensure_terminal_shown;
use crate::platform::get_exe_and_resources_dirs;
use crate::platform::get_uv_binary_name;
use crate::platform::launch_anki_after_update;
use crate::platform::launch_anki_normally;
use crate::platform::respawn_launcher;
mod platform;
@ -35,7 +35,11 @@ struct State {
has_existing_install: bool,
prerelease_marker: std::path::PathBuf,
uv_install_root: std::path::PathBuf,
uv_cache_dir: std::path::PathBuf,
no_cache_marker: std::path::PathBuf,
anki_base_folder: std::path::PathBuf,
uv_path: std::path::PathBuf,
uv_python_install_dir: std::path::PathBuf,
user_pyproject_path: std::path::PathBuf,
user_python_version_path: std::path::PathBuf,
dist_pyproject_path: std::path::PathBuf,
@ -56,12 +60,14 @@ pub enum MainMenuChoice {
KeepExisting,
Version(VersionKind),
ToggleBetas,
ToggleCache,
Uninstall,
Quit,
}
fn main() {
if let Err(e) = run() {
eprintln!("Error: {:#}", e);
eprintln!("Error: {e:#}");
eprintln!("Press enter to close...");
let mut input = String::new();
let _ = stdin().read_line(&mut input);
@ -81,7 +87,11 @@ fn run() -> Result<()> {
has_existing_install: uv_install_root.join(".sync_complete").exists(),
prerelease_marker: uv_install_root.join("prerelease"),
uv_install_root: uv_install_root.clone(),
uv_cache_dir: uv_install_root.join("cache"),
no_cache_marker: uv_install_root.join("nocache"),
anki_base_folder: get_anki_base_path()?,
uv_path: exe_dir.join(get_uv_binary_name()),
uv_python_install_dir: uv_install_root.join("python"),
user_pyproject_path: uv_install_root.join("pyproject.toml"),
user_python_version_path: uv_install_root.join(".python-version"),
dist_pyproject_path: resources_dir.join("pyproject.toml"),
@ -90,6 +100,13 @@ fn run() -> Result<()> {
sync_complete_marker: uv_install_root.join(".sync_complete"),
};
// Check for uninstall request from Windows uninstaller
if std::env::var("ANKI_LAUNCHER_UNINSTALL").is_ok() {
ensure_terminal_shown()?;
handle_uninstall(&state)?;
return Ok(());
}
// Create install directory and copy project files in
create_dir_all(&state.uv_install_root)?;
let had_user_pyproj = state.user_pyproject_path.exists();
@ -133,7 +150,7 @@ fn run() -> Result<()> {
#[cfg(target_os = "macos")]
{
let cmd = build_python_command(&state.uv_install_root, &[])?;
platform::mac::prepare_for_launch_after_update(cmd)?;
platform::mac::prepare_for_launch_after_update(cmd, &uv_install_root)?;
}
if cfg!(unix) && !cfg!(target_os = "macos") {
@ -143,20 +160,24 @@ fn run() -> Result<()> {
} else {
// on Windows/macOS, the user needs to close the terminal/console
// currently, but ideas on how we can avoid this would be good!
println!();
println!("Anki will start shortly.");
println!("\x1B[1mYou can close this window.\x1B[0m\n");
}
let cmd = build_python_command(&state.uv_install_root, &[])?;
launch_anki_after_update(cmd)?;
// respawn the launcher as a disconnected subprocess for normal startup
respawn_launcher()?;
Ok(())
}
fn main_menu_loop(state: &State) -> Result<()> {
loop {
let menu_choice =
get_main_menu_choice(state.has_existing_install, &state.prerelease_marker);
let menu_choice = get_main_menu_choice(
state.has_existing_install,
&state.prerelease_marker,
&state.no_cache_marker,
);
match menu_choice {
MainMenuChoice::Quit => std::process::exit(0),
@ -176,10 +197,32 @@ fn main_menu_loop(state: &State) -> Result<()> {
println!();
continue;
}
MainMenuChoice::ToggleCache => {
// Toggle cache disable file
if state.no_cache_marker.exists() {
let _ = remove_file(&state.no_cache_marker);
println!("Download caching enabled.");
} else {
write_file(&state.no_cache_marker, "")?;
// Delete the cache directory and everything in it
if state.uv_cache_dir.exists() {
let _ = anki_io::remove_dir_all(&state.uv_cache_dir);
}
println!("Download caching disabled and cache cleared.");
}
println!();
continue;
}
MainMenuChoice::Uninstall => {
if handle_uninstall(state)? {
std::process::exit(0);
}
continue;
}
choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => {
// For other choices, update project files and sync
update_pyproject_for_version(
choice,
choice.clone(),
state.dist_pyproject_path.clone(),
state.user_pyproject_path.clone(),
state.dist_python_version_path.clone(),
@ -191,11 +234,11 @@ fn main_menu_loop(state: &State) -> Result<()> {
// Sync the venv
let mut command = Command::new(&state.uv_path);
command.current_dir(&state.uv_install_root).args([
"sync",
"--upgrade",
"--managed-python",
]);
command
.current_dir(&state.uv_install_root)
.env("UV_CACHE_DIR", &state.uv_cache_dir)
.env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
.args(["sync", "--upgrade", "--managed-python"]);
// Add python version if .python-version file exists
if state.user_python_version_path.exists() {
@ -211,18 +254,25 @@ fn main_menu_loop(state: &State) -> Result<()> {
command.env("UV_PRERELEASE", "allow");
}
if state.no_cache_marker.exists() {
command.env("UV_NO_CACHE", "1");
}
println!("\x1B[1mUpdating Anki...\x1B[0m\n");
match command.ensure_success() {
Ok(_) => {
// Sync succeeded, break out of loop
// Sync succeeded
if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) {
inject_helper_addon(&state.uv_install_root)?;
}
break;
}
Err(e) => {
// If sync fails due to things like a missing wheel on pypi,
// we need to remove the lockfile or uv will cache the bad result.
let _ = remove_file(&state.uv_lock_path);
println!("Install failed: {:#}", e);
println!("Install failed: {e:#}");
println!();
continue;
}
@ -245,6 +295,7 @@ fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> {
fn get_main_menu_choice(
has_existing_install: bool,
prerelease_marker: &std::path::Path,
no_cache_marker: &std::path::Path,
) -> MainMenuChoice {
loop {
println!("1) Latest Anki (just press enter)");
@ -259,7 +310,14 @@ fn get_main_menu_choice(
"4) Allow betas: {}",
if betas_enabled { "on" } else { "off" }
);
println!("5) Quit");
let cache_enabled = !no_cache_marker.exists();
println!(
"5) Cache downloads: {}",
if cache_enabled { "on" } else { "off" }
);
println!();
println!("6) Uninstall");
println!("7) Quit");
print!("> ");
let _ = stdout().flush();
@ -281,7 +339,9 @@ fn get_main_menu_choice(
}
}
"4" => MainMenuChoice::ToggleBetas,
"5" => MainMenuChoice::Quit,
"5" => MainMenuChoice::ToggleCache,
"6" => MainMenuChoice::Uninstall,
"7" => MainMenuChoice::Quit,
_ => {
println!("Invalid input. Please try again.");
continue;
@ -336,8 +396,13 @@ fn update_pyproject_for_version(
// Do nothing - keep existing pyproject.toml and .python-version
}
MainMenuChoice::ToggleBetas => {
// This should not be reached as ToggleBetas is handled in the loop
unreachable!("ToggleBetas should be handled in the main loop");
unreachable!();
}
MainMenuChoice::ToggleCache => {
unreachable!();
}
MainMenuChoice::Uninstall => {
unreachable!();
}
MainMenuChoice::Version(version_kind) => {
let content = read_file(&dist_pyproject_path)?;
@ -351,6 +416,7 @@ fn update_pyproject_for_version(
&format!(
concat!(
"aqt[qt6]=={}\",\n",
" \"anki-audio==0.1.0; sys.platform == 'win32' or sys.platform == 'darwin'\",\n",
" \"pyqt6==6.6.1\",\n",
" \"pyqt6-qt6==6.6.2\",\n",
" \"pyqt6-webengine==6.6.0\",\n",
@ -362,7 +428,7 @@ fn update_pyproject_for_version(
)
}
VersionKind::Uv(version) => {
content_str.replace("anki-release", &format!("anki-release=={}", version))
content_str.replace("anki-release", &format!("anki-release=={version}"))
}
};
write_file(&user_pyproject_path, &updated_content)?;
@ -427,6 +493,108 @@ fn parse_version_kind(version: &str) -> Option<VersionKind> {
}
}
/// Install (or refresh) the bundled "anki-launcher" add-on so older Anki
/// versions gain an Upgrade/Downgrade menu entry.
fn inject_helper_addon(_uv_install_root: &std::path::Path) -> Result<()> {
    let addons21_path = get_anki_addons21_path()?;
    if !addons21_path.exists() {
        return Ok(());
    }

    let addon_folder = addons21_path.join("anki-launcher");

    // Start from a clean slate: drop any previous copy, then recreate it.
    if addon_folder.exists() {
        anki_io::remove_dir_all(&addon_folder)?;
    }
    create_dir_all(&addon_folder)?;

    // Materialize the add-on files embedded at compile time.
    write_file(
        addon_folder.join("__init__.py"),
        include_str!("../addon/__init__.py"),
    )?;
    write_file(
        addon_folder.join("manifest.json"),
        include_str!("../addon/manifest.json"),
    )?;

    Ok(())
}
/// Location of the user's Anki2 data folder.
///
/// Windows: %APPDATA%\Anki2. macOS and Linux both use the platform data dir
/// (~/Library/Application Support resp. ~/.local/share), so they share a branch.
fn get_anki_base_path() -> Result<std::path::PathBuf> {
    let base = if cfg!(target_os = "windows") {
        dirs::config_dir().context("Unable to determine config directory")?
    } else {
        dirs::data_dir().context("Unable to determine data directory")?
    };
    Ok(base.join("Anki2"))
}
/// The addons21 folder inside the Anki base directory.
fn get_anki_addons21_path() -> Result<std::path::PathBuf> {
    let base = get_anki_base_path()?;
    Ok(base.join("addons21"))
}
/// Print `question`, show a `>` prompt and return true when the user answers "y".
fn prompt_yes(question: &str) -> bool {
    println!("{question}");
    print!("> ");
    let _ = stdout().flush();

    let mut input = String::new();
    let _ = stdin().read_line(&mut input);
    input.trim().eq_ignore_ascii_case("y")
}

/// Interactive uninstall: removes the program files and optionally the user's
/// profiles. Returns Ok(true) when the uninstall went ahead (caller should
/// exit), Ok(false) when the user cancelled.
///
/// The duplicated prompt/read/normalize sequence of the original is factored
/// into `prompt_yes` above.
fn handle_uninstall(state: &State) -> Result<bool> {
    if !prompt_yes("Uninstall Anki's program files? (y/n)") {
        println!("Uninstall cancelled.");
        println!();
        return Ok(false);
    }

    // Remove program files
    if state.uv_install_root.exists() {
        anki_io::remove_dir_all(&state.uv_install_root)?;
        println!("Program files removed.");
    }
    println!();

    // Only touch user data after a second, explicit confirmation.
    if prompt_yes("Remove all profiles/cards? (y/n)") && state.anki_base_folder.exists() {
        anki_io::remove_dir_all(&state.anki_base_folder)?;
        println!("User data removed.");
    }
    println!();

    // Platform-specific messages
    #[cfg(target_os = "macos")]
    platform::mac::finalize_uninstall();

    #[cfg(target_os = "windows")]
    platform::windows::finalize_uninstall();

    #[cfg(all(unix, not(target_os = "macos")))]
    platform::unix::finalize_uninstall();

    Ok(true)
}
fn build_python_command(uv_install_root: &std::path::Path, args: &[String]) -> Result<Command> {
let python_exe = if cfg!(target_os = "windows") {
let show_console = std::env::var("ANKI_CONSOLE").is_ok();
@ -440,7 +608,7 @@ fn build_python_command(uv_install_root: &std::path::Path, args: &[String]) -> R
};
let mut cmd = Command::new(python_exe);
cmd.args(["-c", "import aqt; aqt.run()"]);
cmd.args(["-c", "import aqt, sys; sys.argv[0] = 'Anki'; aqt.run()"]);
cmd.args(args);
// tell the Python code it was invoked by the launcher, and updating is
// available

View file

@ -3,6 +3,7 @@
use std::io;
use std::io::Write;
use std::path::Path;
use std::process::Command;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
@ -14,7 +15,7 @@ use anki_process::CommandExt as AnkiCommandExt;
use anyhow::Context;
use anyhow::Result;
pub fn prepare_for_launch_after_update(mut cmd: Command) -> Result<()> {
pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<()> {
// Pre-validate by running --version to trigger any Gatekeeper checks
print!("\n\x1B[1mThis may take a few minutes. Please wait\x1B[0m");
io::stdout().flush().unwrap();
@ -37,6 +38,20 @@ pub fn prepare_for_launch_after_update(mut cmd: Command) -> Result<()> {
.stderr(std::process::Stdio::null())
.ensure_success();
if cfg!(target_os = "macos") {
// older Anki versions had a short mpv timeout and didn't support
// ANKI_FIRST_RUN, so we need to ensure mpv passes Gatekeeper
// validation prior to launch
let mpv_path = root.join(".venv/lib/python3.9/site-packages/anki_audio/mpv");
if mpv_path.exists() {
let _ = Command::new(&mpv_path)
.arg("--version")
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.ensure_success();
}
}
// Stop progress indicator
running.store(false, Ordering::Relaxed);
progress_thread.join().unwrap();
@ -52,3 +67,32 @@ pub fn relaunch_in_terminal() -> Result<()> {
.ensure_spawn()?;
std::process::exit(0);
}
/// Move the enclosing .app bundle to the trash. If the bundle cannot be
/// found or `trash` fails, ask the user to remove it manually.
pub fn finalize_uninstall() {
    if let Ok(exe_path) = std::env::current_exe() {
        // Skip the executable itself and inspect each enclosing directory.
        for ancestor in exe_path.ancestors().skip(1) {
            let is_bundle = ancestor
                .file_name()
                .map_or(false, |name| name.to_string_lossy().ends_with(".app"));
            if !is_bundle {
                continue;
            }

            // Found the bundle: try the `trash` command-line tool.
            let trashed = Command::new("trash")
                .arg(ancestor)
                .output()
                .map(|output| output.status.success())
                .unwrap_or(false);
            if trashed {
                println!("Anki has been uninstalled.");
            } else {
                // Fall back to manual instructions
                println!(
                    "Please manually drag Anki.app to the trash to complete uninstall."
                );
            }
            return;
        }
    }
}

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
#[cfg(all(unix, not(target_os = "macos")))]
mod unix;
pub mod unix;
#[cfg(target_os = "macos")]
pub mod mac;
@ -49,10 +49,32 @@ pub fn get_uv_binary_name() -> &'static str {
}
}
pub fn launch_anki_after_update(mut cmd: std::process::Command) -> Result<()> {
pub fn respawn_launcher() -> Result<()> {
use std::process::Stdio;
cmd.stdin(Stdio::null())
let mut launcher_cmd = if cfg!(target_os = "macos") {
// On macOS, we need to launch the .app bundle, not the executable directly
let current_exe =
std::env::current_exe().context("Failed to get current executable path")?;
// Navigate from Contents/MacOS/launcher to the .app bundle
let app_bundle = current_exe
.parent() // MacOS
.and_then(|p| p.parent()) // Contents
.and_then(|p| p.parent()) // .app
.context("Failed to find .app bundle")?;
let mut cmd = std::process::Command::new("open");
cmd.arg(app_bundle);
cmd
} else {
let current_exe =
std::env::current_exe().context("Failed to get current executable path")?;
std::process::Command::new(current_exe)
};
launcher_cmd
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null());
@ -61,16 +83,16 @@ pub fn launch_anki_after_update(mut cmd: std::process::Command) -> Result<()> {
use std::os::windows::process::CommandExt;
const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200;
const DETACHED_PROCESS: u32 = 0x00000008;
cmd.creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS);
launcher_cmd.creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS);
}
#[cfg(unix)]
#[cfg(all(unix, not(target_os = "macos")))]
{
use std::os::unix::process::CommandExt;
cmd.process_group(0);
launcher_cmd.process_group(0);
}
let child = cmd.ensure_spawn()?;
let child = launcher_cmd.ensure_spawn()?;
std::mem::forget(child);
Ok(())

View file

@ -47,3 +47,21 @@ pub fn relaunch_in_terminal() -> Result<()> {
// If no terminal worked, continue without relaunching
Ok(())
}
/// Tell the user how to finish uninstalling on Linux, then wait for enter.
pub fn finalize_uninstall() {
    use std::io::stdin;
    use std::io::stdout;
    use std::io::Write;

    // System-wide installs ship an uninstall script that needs root.
    let script = std::path::Path::new("/usr/local/share/anki/uninstall.sh");
    let message = if script.exists() {
        "To finish uninstalling, run 'sudo /usr/local/share/anki/uninstall.sh'"
    } else {
        "Anki has been uninstalled."
    };
    println!("{message}");

    println!("Press enter to quit.");
    let _ = stdout().flush();
    let mut buf = String::new();
    let _ = stdin().read_line(&mut buf);
}

View file

@ -1,11 +1,17 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::ffi::OsStr;
use std::io::stdin;
use std::os::windows::ffi::OsStrExt;
use std::process::Command;
use anyhow::Context;
use anyhow::Result;
use winapi::shared::minwindef::HKEY;
use winapi::um::wincon;
use winapi::um::winnt::KEY_READ;
use winapi::um::winreg;
pub fn ensure_terminal_shown() -> Result<()> {
unsafe {
@ -79,3 +85,122 @@ fn reconnect_stdio_to_console() {
libc::freopen(conout.as_ptr(), w.as_ptr(), stderr());
}
}
/// Hand off to the NSIS uninstaller when it can be located; otherwise point
/// the user at Windows Settings. ANKI_LAUNCHER=1 tells the uninstaller it was
/// invoked by the launcher.
pub fn finalize_uninstall() {
    if let Some(path) = get_uninstaller_path() {
        println!("Launching Windows uninstaller...");
        match Command::new(&path).env("ANKI_LAUNCHER", "1").spawn() {
            Ok(_) => {
                // Uninstaller takes over from here; nothing left for us to do.
                println!("Uninstaller launched successfully.");
                return;
            }
            Err(e) => {
                println!("Failed to launch uninstaller: {e}");
                println!("You can manually run: {}", path.display());
            }
        }
    } else {
        println!("Windows uninstaller not found.");
        println!("You may need to uninstall via Windows Settings > Apps.");
    }

    // Reached only on failure: keep the console open so the user sees why.
    println!("Press enter to close...");
    let mut input = String::new();
    let _ = stdin().read_line(&mut input);
}
/// Locate uninstall.exe: prefer the install dir recorded in the registry,
/// then the default per-user install location.
fn get_uninstaller_path() -> Option<std::path::PathBuf> {
    let registry_dir = read_registry_install_dir();
    let default_dir = dirs::data_local_dir().map(|d| d.join("Programs").join("Anki"));

    for dir in registry_dir.into_iter().chain(default_dir) {
        let candidate = dir.join("uninstall.exe");
        if candidate.exists() {
            return Some(candidate);
        }
    }
    None
}
/// Read Anki's install directory from HKCU\SOFTWARE\Anki (Install_Dir64).
///
/// Returns `None` when the key or value is missing, or any registry call
/// fails. Uses the raw winapi bindings, hence the unsafe block.
fn read_registry_install_dir() -> Option<std::path::PathBuf> {
    unsafe {
        let mut hkey: HKEY = std::ptr::null_mut();

        // Convert the registry path to wide string
        // (Windows registry APIs take NUL-terminated UTF-16.)
        let subkey: Vec<u16> = OsStr::new("SOFTWARE\\Anki")
            .encode_wide()
            .chain(std::iter::once(0))
            .collect();

        // Open the registry key
        // SAFETY: `subkey` is a valid NUL-terminated UTF-16 string and `hkey`
        // is a valid out-pointer that receives the opened handle.
        let result = winreg::RegOpenKeyExW(
            winreg::HKEY_CURRENT_USER,
            subkey.as_ptr(),
            0,
            KEY_READ,
            &mut hkey,
        );

        // ERROR_SUCCESS is 0; anything else means absent/unreadable.
        if result != 0 {
            return None;
        }

        // Query the Install_Dir64 value
        let value_name: Vec<u16> = OsStr::new("Install_Dir64")
            .encode_wide()
            .chain(std::iter::once(0))
            .collect();

        let mut value_type = 0u32;
        let mut data_size = 0u32;

        // First call to get the size
        // (null data pointer: only `data_size` is filled in, in bytes).
        let result = winreg::RegQueryValueExW(
            hkey,
            value_name.as_ptr(),
            std::ptr::null_mut(),
            &mut value_type,
            std::ptr::null_mut(),
            &mut data_size,
        );

        if result != 0 || data_size == 0 {
            // Close the handle before bailing so it is never leaked.
            winreg::RegCloseKey(hkey);
            return None;
        }

        // Allocate buffer and read the value
        // NOTE(review): data_size is in bytes, so /2 gives u16 units; this
        // assumes an even byte count, and `value_type` is never checked
        // against REG_SZ — confirm these assumptions hold for our installer.
        let mut buffer: Vec<u16> = vec![0; (data_size / 2) as usize];
        // SAFETY: `buffer` holds `data_size` bytes and stays alive for the
        // duration of the call; the cast to *mut u8 matches the API contract.
        let result = winreg::RegQueryValueExW(
            hkey,
            value_name.as_ptr(),
            std::ptr::null_mut(),
            &mut value_type,
            buffer.as_mut_ptr() as *mut u8,
            &mut data_size,
        );

        winreg::RegCloseKey(hkey);

        if result == 0 {
            // Convert wide string back to PathBuf
            // (truncate at the first NUL; registry strings may include one).
            let len = buffer.iter().position(|&x| x == 0).unwrap_or(buffer.len());
            let path_str = String::from_utf16_lossy(&buffer[..len]);
            Some(std::path::PathBuf::from(path_str))
        } else {
            None
        }
    }
}

View file

@ -250,8 +250,18 @@ FunctionEnd
; Uninstaller
function un.onInit
MessageBox MB_OKCANCEL "This will remove Anki's program files, but will not delete your card data. If you wish to delete your card data as well, you can do so via File>Switch Profile inside Anki first. Are you sure you wish to uninstall Anki?" /SD IDOK IDOK next
Quit
; Check for ANKI_LAUNCHER environment variable
ReadEnvStr $R0 "ANKI_LAUNCHER"
${If} $R0 != ""
; Wait for launcher to exit
Sleep 2000
Goto next
${Else}
; Try to launch anki.exe with ANKI_LAUNCHER_UNINSTALL=1
IfFileExists "$INSTDIR\anki.exe" 0 next
nsExec::Exec 'cmd /c "set ANKI_LAUNCHER_UNINSTALL=1 && start /b "" "$INSTDIR\anki.exe""'
Quit
${EndIf}
next:
functionEnd

View file

@ -21,14 +21,11 @@ pub fn check(lang_map: &TranslationsByLang) {
fn check_content(lang: &str, fname: &str, content: &str) {
let lang_id: LanguageIdentifier = "en-US".parse().unwrap();
let resource = FluentResource::try_new(content.into()).unwrap_or_else(|e| {
panic!("{}\nUnable to parse {}/{}: {:?}", content, lang, fname, e);
panic!("{content}\nUnable to parse {lang}/{fname}: {e:?}");
});
let mut bundle: FluentBundle<FluentResource> = FluentBundle::new(vec![lang_id]);
bundle.add_resource(resource).unwrap_or_else(|e| {
panic!(
"{}\nUnable to bundle - duplicate key? {}/{}: {:?}",
content, lang, fname, e
);
panic!("{content}\nUnable to bundle - duplicate key? {lang}/{fname}: {e:?}");
});
}

View file

@ -48,8 +48,7 @@ fn add_folder(map: &mut TranslationsByLang, folder: &Path, lang: &str) {
let text = fs::read_to_string(entry.path()).unwrap();
assert!(
text.ends_with('\n'),
"file was missing final newline: {:?}",
entry
"file was missing final newline: {entry:?}"
);
map_entry.entry(module).or_default().push_str(&text);
println!("cargo:rerun-if-changed={}", entry.path().to_str().unwrap());

View file

@ -130,7 +130,7 @@ fn get_bundle(
) -> Option<FluentBundle<FluentResource>> {
let res = FluentResource::try_new(text.into())
.map_err(|e| {
println!("Unable to parse translations file: {:?}", e);
println!("Unable to parse translations file: {e:?}");
})
.ok()?;
@ -138,14 +138,14 @@ fn get_bundle(
bundle
.add_resource(res)
.map_err(|e| {
println!("Duplicate key detected in translation file: {:?}", e);
println!("Duplicate key detected in translation file: {e:?}");
})
.ok()?;
if !extra_text.is_empty() {
match FluentResource::try_new(extra_text) {
Ok(res) => bundle.add_resource_overriding(res),
Err((_res, e)) => println!("Unable to parse translations file: {:?}", e),
Err((_res, e)) => println!("Unable to parse translations file: {e:?}"),
}
}
@ -291,7 +291,7 @@ impl I18n {
let mut errs = vec![];
let out = bundle.format_pattern(pat, args.as_ref(), &mut errs);
if !errs.is_empty() {
println!("Error(s) in translation '{}': {:?}", key, errs);
println!("Error(s) in translation '{key}': {errs:?}");
}
// clone so we can discard args
return out.to_string().into();

View file

@ -81,7 +81,7 @@ fn get_args(variables: &[Variable]) -> String {
.iter()
.map(|v| format!("\"{}\": args.{}", v.name, typescript_arg_name(&v.name)))
.join(", ");
format!("{{{}}}", out)
format!("{{{out}}}")
}
}

View file

@ -69,12 +69,6 @@ impl I18n {
{var_build}
self.translate("{key}"{out_args})
}}"#,
func = func,
key = key,
doc = doc,
in_args = in_args,
out_args = out_args,
var_build = var_build,
)
.unwrap();
}
@ -103,9 +97,6 @@ fn build_vars(translation: &Translation) -> String {
writeln!(
buf,
r#" args.set("{fluent_name}", {rust_name}{trailer});"#,
fluent_name = fluent_name,
rust_name = rust_name,
trailer = trailer,
)
.unwrap();
}
@ -204,13 +195,7 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
.unwrap();
for (module, contents) in modules {
writeln!(
buf,
r###" "{module}" => r##"{contents}"##,"###,
module = module,
contents = contents
)
.unwrap();
writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap();
}
buf.push_str("};\n");

View file

@ -183,9 +183,9 @@ fn python_type(field: &FieldDescriptor, output: bool) -> String {
};
if field.is_list() {
if output {
format!("Sequence[{}]", kind)
format!("Sequence[{kind}]")
} else {
format!("Iterable[{}]", kind)
format!("Iterable[{kind}]")
}
} else if field.is_map() {
let map_kind = field.kind();

View file

@ -263,7 +263,7 @@ impl MethodHelpers for Method {
fn get_input_arg_with_label(&self) -> String {
self.input_type()
.as_ref()
.map(|t| format!("input: {}", t))
.map(|t| format!("input: {t}"))
.unwrap_or_default()
}

View file

@ -515,7 +515,7 @@ impl RowContext {
return "".into();
};
if self.cards[0].is_undue_queue() {
format!("({})", due)
format!("({due})")
} else {
due.into()
}
@ -623,7 +623,7 @@ impl RowContext {
if self.notes_mode {
let decks = self.cards.iter().map(|c| c.deck_id).unique().count();
if decks > 1 {
return format!("({})", decks);
return format!("({decks})");
}
}
let deck_name = self.deck.human_name();

View file

@ -52,7 +52,7 @@ trait Write {
}
fn write_sound(&mut self, buf: &mut String, resource: &str) {
write!(buf, "[sound:{}]", resource).unwrap();
write!(buf, "[sound:{resource}]").unwrap();
}
fn write_directive(&mut self, buf: &mut String, directive: &Directive) {
@ -94,9 +94,9 @@ trait Write {
fn write_directive_option(&mut self, buf: &mut String, key: &str, val: &str) {
if val.contains([']', ' ', '\t', '\r', '\n']) {
write!(buf, " {}=\"{}\"", key, val).unwrap();
write!(buf, " {key}=\"{val}\"").unwrap();
} else {
write!(buf, " {}={}", key, val).unwrap();
write!(buf, " {key}={val}").unwrap();
}
}
@ -158,7 +158,7 @@ impl Write for AvExtractor<'_> {
fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) {
if let Some(error) = directive.error(self.tr) {
write!(buf, "[{}]", error).unwrap();
write!(buf, "[{error}]").unwrap();
return;
}
@ -173,7 +173,7 @@ impl Write for AvExtractor<'_> {
other_args: directive
.options
.iter()
.map(|(key, val)| format!("{}={}", key, val))
.map(|(key, val)| format!("{key}={val}"))
.collect(),
},
)),
@ -204,7 +204,7 @@ impl AvPrettifier {
impl Write for AvPrettifier {
fn write_sound(&mut self, buf: &mut String, resource: &str) {
write!(buf, "🔉{}🔉", resource).unwrap();
write!(buf, "🔉{resource}🔉").unwrap();
}
fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) {

View file

@ -41,5 +41,5 @@ impl Collection {
}
fn build_aux_deck_key(deck: DeckId, key: &str) -> String {
format!("_deck_{deck}_{key}", deck = deck, key = key)
format!("_deck_{deck}_{key}")
}

View file

@ -32,7 +32,7 @@ impl Collection {
};
Ok(get_aux_notetype_config_key(
ntid,
&format!("{}_{}", key, ordinal),
&format!("{key}_{ordinal}"),
))
}
}
@ -70,5 +70,5 @@ impl Collection {
}
pub fn get_aux_notetype_config_key(ntid: NotetypeId, key: &str) -> String {
format!("_nt_{ntid}_{key}", ntid = ntid, key = key)
format!("_nt_{ntid}_{key}")
}

View file

@ -387,10 +387,10 @@ impl Collection {
let mut basic = all_stock_notetypes(&self.tr).remove(0);
let mut field = 3;
while basic.fields.len() < field_count {
basic.add_field(format!("{}", field));
basic.add_field(format!("{field}"));
field += 1;
}
basic.name = format!("db-check-{}-{}", stamp, field_count);
basic.name = format!("db-check-{stamp}-{field_count}");
let qfmt = basic.templates[0].config.q_format.clone();
let afmt = basic.templates[0].config.a_format.clone();
for n in 0..extra_cards_required {

View file

@ -93,7 +93,7 @@ impl Collection {
pub(crate) fn recover_missing_deck(&mut self, did: DeckId, usn: Usn) -> Result<()> {
let mut deck = Deck::new_normal();
deck.id = did;
deck.name = NativeDeckName::from_native_str(format!("recovered{}", did));
deck.name = NativeDeckName::from_native_str(format!("recovered{did}"));
deck.set_modified(usn);
self.add_or_update_single_deck_with_existing_id(&mut deck, usn)
}

View file

@ -67,7 +67,7 @@ impl From<Error> for AnkiError {
}
AnkiError::DbError {
source: DbError {
info: format!("{:?}", err),
info: format!("{err:?}"),
kind: DbErrorKind::Other,
},
}
@ -88,7 +88,7 @@ impl From<FromSqlError> for AnkiError {
}
AnkiError::DbError {
source: DbError {
info: format!("{:?}", err),
info: format!("{err:?}"),
kind: DbErrorKind::Other,
},
}
@ -101,7 +101,7 @@ impl DbError {
DbErrorKind::Corrupt => self.info.clone(),
// fixme: i18n
DbErrorKind::Locked => "Anki already open, or media currently syncing.".into(),
_ => format!("{:?}", self),
_ => format!("{self:?}"),
}
}
}

View file

@ -26,7 +26,7 @@ impl InvalidInputError {
pub fn context(&self) -> String {
if let Some(source) = &self.source {
format!("{}", source)
format!("{source}")
} else {
String::new()
}

View file

@ -149,13 +149,13 @@ impl AnkiError {
}
CardTypeErrorDetails::MissingCloze => tr.card_templates_missing_cloze(),
};
format!("{}<br>{}", header, details)
format!("{header}<br>{details}")
}
AnkiError::DbError { source } => source.message(tr),
AnkiError::SearchError { source } => source.message(tr),
AnkiError::ParseNumError => tr.errors_parse_number_fail().into(),
AnkiError::FilteredDeckError { source } => source.message(tr),
AnkiError::InvalidRegex { info: source } => format!("<pre>{}</pre>", source),
AnkiError::InvalidRegex { info: source } => format!("<pre>{source}</pre>"),
AnkiError::MultipleNotetypesSelected => tr.errors_multiple_notetypes_selected().into(),
AnkiError::DatabaseCheckRequired => tr.errors_please_check_database().into(),
AnkiError::MediaCheckRequired => tr.errors_please_check_media().into(),
@ -172,7 +172,7 @@ impl AnkiError {
| AnkiError::InvalidServiceIndex
| AnkiError::InvalidMethodIndex
| AnkiError::UndoEmpty
| AnkiError::InvalidCertificateFormat => format!("{:?}", self),
| AnkiError::InvalidCertificateFormat => format!("{self:?}"),
AnkiError::FileIoError { source } => source.message(),
AnkiError::InvalidInput { source } => source.message(),
AnkiError::NotFound { source } => source.message(tr),

View file

@ -68,7 +68,7 @@ impl AnkiError {
impl From<&reqwest::Error> for AnkiError {
fn from(err: &reqwest::Error) -> Self {
let url = err.url().map(|url| url.as_str()).unwrap_or("");
let str_err = format!("{}", err);
let str_err = format!("{err}");
// strip url from error to avoid exposing keys
let info = str_err.replace(url, "");
@ -205,7 +205,7 @@ impl NetworkError {
NetworkErrorKind::Other => tr.network_other(),
};
let details = tr.network_details(self.info.as_str());
format!("{}\n\n{}", summary, details)
format!("{summary}\n\n{details}")
}
}
@ -226,7 +226,7 @@ impl From<HttpError> for AnkiError {
}
.into()
} else {
AnkiError::sync_error(format!("{:?}", err), SyncErrorKind::Other)
AnkiError::sync_error(format!("{err:?}"), SyncErrorKind::Other)
}
}
}

View file

@ -77,7 +77,7 @@ impl Collection {
) -> Result<GetImageOcclusionNoteResponse> {
let value = match self.get_image_occlusion_note_inner(note_id) {
Ok(note) => Value::Note(note),
Err(err) => Value::Error(format!("{:?}", err)),
Err(err) => Value::Error(format!("{err:?}")),
};
Ok(GetImageOcclusionNoteResponse { value: Some(value) })
}

View file

@ -98,7 +98,7 @@ pub fn get_image_cloze_data(text: &str) -> String {
let Some((x, y)) = point_pair.split_once(',') else {
continue;
};
write!(&mut point_str, "{},{} ", x, y).unwrap();
write!(&mut point_str, "{x},{y} ").unwrap();
}
// remove the trailing space
point_str.pop();

View file

@ -100,7 +100,7 @@ fn fname_for_latex(latex: &str, svg: bool) -> String {
let ext = if svg { "svg" } else { "png" };
let csum = hex::encode(sha1_of_data(latex.as_bytes()));
format!("latex-{}.{}", csum, ext)
format!("latex-{csum}.{ext}")
}
fn image_link_for_fname(src: &str, fname: &str) -> String {
@ -122,11 +122,7 @@ mod test {
assert_eq!(
extract_latex("a[latex]one<br>and<div>two[/latex]b", false),
(
format!(
"a<img class=latex alt=\"one&#x0A;and&#x0A;two\" src=\"{}\">b",
fname
)
.into(),
format!("a<img class=latex alt=\"one&#x0A;and&#x0A;two\" src=\"{fname}\">b").into(),
vec![ExtractedLatex {
fname: fname.into(),
latex: "one\nand\ntwo".into()

View file

@ -69,8 +69,8 @@ fn maybe_rotate_log(path: &str) -> io::Result<()> {
return Ok(());
}
let path2 = format!("{}.1", path);
let path3 = format!("{}.2", path);
let path2 = format!("{path}.1");
let path3 = format!("{path}.2");
// if a rotated file already exists, rename it
if let Err(e) = fs::rename(&path2, path3) {

View file

@ -218,7 +218,7 @@ fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
let mut new_name = if ext.is_empty() {
stem.to_string()
} else {
format!("{}.{}", stem, ext)
format!("{stem}.{ext}")
};
// make sure we don't break Windows by ending with a space or dot

View file

@ -270,7 +270,7 @@ impl Note {
self.fields
.last_mut()
.unwrap()
.push_str(&format!("; {}", last));
.push_str(&format!("; {last}"));
}
}
}

View file

@ -126,7 +126,7 @@ fn other_to_bytes(other: &HashMap<String, Value>) -> Vec<u8> {
} else {
serde_json::to_vec(other).unwrap_or_else(|e| {
// theoretically should never happen
println!("serialization failed for {:?}: {}", other, e);
println!("serialization failed for {other:?}: {e}");
vec![]
})
}
@ -140,7 +140,7 @@ pub(crate) fn parse_other_fields(
Default::default()
} else {
let mut map: HashMap<String, Value> = serde_json::from_slice(bytes).unwrap_or_else(|e| {
println!("deserialization failed for other: {}", e);
println!("deserialization failed for other: {e}");
Default::default()
});
map.retain(|k, _v| !reserved.contains(k));

View file

@ -179,8 +179,8 @@ pub(crate) fn cloze(tr: &I18n) -> Notetype {
let back_extra = tr.notetypes_back_extra_field();
config = nt.add_field(back_extra.as_ref());
config.tag = Some(ClozeField::BackExtra as u32);
let qfmt = format!("{{{{cloze:{}}}}}", text);
let afmt = format!("{}<br>\n{{{{{}}}}}", qfmt, back_extra);
let qfmt = format!("{{{{cloze:{text}}}}}");
let afmt = format!("{qfmt}<br>\n{{{{{back_extra}}}}}");
nt.add_template(nt.name.clone(), qfmt, afmt);
nt
}

View file

@ -889,22 +889,20 @@ pub(crate) mod test {
) -> Result<()> {
// Change due time to fake card answer_time,
// works since answer_time is calculated as due - last_ivl
let update_due_string = format!("update cards set due={}", shift_due_time);
let update_due_string = format!("update cards set due={shift_due_time}");
col.storage.db.execute_batch(&update_due_string)?;
col.clear_study_queues();
let current_card_state = current_state(col, post_answer.card_id);
let state = match current_card_state {
CardState::Normal(NormalState::Learning(state)) => state,
_ => panic!("State is not Normal: {:?}", current_card_state),
_ => panic!("State is not Normal: {current_card_state:?}"),
};
let elapsed_secs = state.elapsed_secs as i32;
// Give a 1 second leeway when the test runs on the off chance
// that the test runs as a second rolls over.
assert!(
(elapsed_secs - expected_elapsed_secs).abs() <= 1,
"elapsed_secs: {} != expected_elapsed_secs: {}",
elapsed_secs,
expected_elapsed_secs
"elapsed_secs: {elapsed_secs} != expected_elapsed_secs: {expected_elapsed_secs}"
);
Ok(())

View file

@ -214,14 +214,14 @@ impl Collection {
.search_terms
.get_mut(0)
.unwrap();
term1.search = format!("{} is:due", search);
term1.search = format!("{search} is:due");
let term2 = deck
.filtered_mut()
.unwrap()
.search_terms
.get_mut(1)
.unwrap();
term2.search = format!("{} is:new", search);
term2.search = format!("{search} is:new");
}
}

View file

@ -25,7 +25,7 @@ pub fn answer_button_time_collapsible(seconds: u32, collapse_secs: u32, tr: &I18
if seconds == 0 {
tr.scheduling_end().into()
} else if seconds < collapse_secs {
format!("<{}", string)
format!("<{string}")
} else {
string
}

View file

@ -219,7 +219,7 @@ impl From<TemplateKind> for SearchNode {
impl From<NoteId> for SearchNode {
fn from(n: NoteId) -> Self {
SearchNode::NoteIds(format!("{}", n))
SearchNode::NoteIds(format!("{n}"))
}
}

View file

@ -240,7 +240,7 @@ impl Collection {
} else {
self.storage.setup_searched_cards_table()?;
}
let sql = format!("insert into search_cids {}", sql);
let sql = format!("insert into search_cids {sql}");
let cards = self
.storage
@ -307,7 +307,7 @@ impl Collection {
let (sql, args) = writer.build_query(&top_node, mode.required_table())?;
self.storage.setup_searched_notes_table()?;
let sql = format!("insert into search_nids {}", sql);
let sql = format!("insert into search_nids {sql}");
let notes = self
.storage

View file

@ -277,7 +277,7 @@ fn unquoted_term(s: &str) -> IResult<Node> {
Err(parse_failure(
s,
FailKind::UnknownEscape {
provided: format!("\\{}", c),
provided: format!("\\{c}"),
},
))
} else if "\"() \u{3000}".contains(s.chars().next().unwrap()) {
@ -637,7 +637,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
s,
// id lists are undocumented, so no translation
FailKind::Other {
info: Some(format!("expected only digits and commas in {}:", context)),
info: Some(format!("expected only digits and commas in {context}:")),
},
))
}
@ -1110,19 +1110,19 @@ mod test {
for term in &["added", "edited", "rated", "resched"] {
assert!(matches!(
failkind(&format!("{}:1.1", term)),
failkind(&format!("{term}:1.1")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("{}:-1", term)),
failkind(&format!("{term}:-1")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("{}:", term)),
failkind(&format!("{term}:")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("{}:foo", term)),
failkind(&format!("{term}:foo")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
}
@ -1223,19 +1223,19 @@ mod test {
for term in &["ivl", "reps", "lapses", "pos"] {
assert!(matches!(
failkind(&format!("prop:{}>", term)),
failkind(&format!("prop:{term}>")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("prop:{}=0.5", term)),
failkind(&format!("prop:{term}=0.5")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("prop:{}!=-1", term)),
failkind(&format!("prop:{term}!=-1")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
assert!(matches!(
failkind(&format!("prop:{}<foo", term)),
failkind(&format!("prop:{term}<foo")),
SearchErrorKind::InvalidPositiveWholeNumber { .. }
));
}

View file

@ -99,7 +99,7 @@ impl crate::services::SearchService for Collection {
regex::escape(&input.search)
};
if !input.match_case {
search = format!("(?i){}", search);
search = format!("(?i){search}");
}
let mut nids = to_note_ids(input.nids);
let field_name = if input.field_name.is_empty() {

View file

@ -158,13 +158,12 @@ impl SqlWriter<'_> {
},
SearchNode::Deck(deck) => self.write_deck(&norm(deck))?,
SearchNode::NotetypeId(ntid) => {
write!(self.sql, "n.mid = {}", ntid).unwrap();
write!(self.sql, "n.mid = {ntid}").unwrap();
}
SearchNode::DeckIdsWithoutChildren(dids) => {
write!(
self.sql,
"c.did in ({}) or (c.odid != 0 and c.odid in ({}))",
dids, dids
"c.did in ({dids}) or (c.odid != 0 and c.odid in ({dids}))"
)
.unwrap();
}
@ -175,13 +174,13 @@ impl SqlWriter<'_> {
SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re),
SearchNode::State(state) => self.write_state(state)?,
SearchNode::Flag(flag) => {
write!(self.sql, "(c.flags & 7) == {}", flag).unwrap();
write!(self.sql, "(c.flags & 7) == {flag}").unwrap();
}
SearchNode::NoteIds(nids) => {
write!(self.sql, "{} in ({})", self.note_id_column(), nids).unwrap();
}
SearchNode::CardIds(cids) => {
write!(self.sql, "c.id in ({})", cids).unwrap();
write!(self.sql, "c.id in ({cids})").unwrap();
}
SearchNode::Property { operator, kind } => self.write_prop(operator, kind)?,
SearchNode::CustomData(key) => self.write_custom_data(key)?,
@ -199,7 +198,7 @@ impl SqlWriter<'_> {
text
};
// implicitly wrap in %
let text = format!("%{}%", text);
let text = format!("%{text}%");
self.args.push(text);
let arg_idx = self.args.len();
@ -279,7 +278,7 @@ impl SqlWriter<'_> {
text => {
write!(self.sql, "n.tags regexp ?").unwrap();
let re = &to_custom_re(text, r"\S");
self.args.push(format!("(?i).* {}(::| ).*", re));
self.args.push(format!("(?i).* {re}(::| ).*"));
}
}
}
@ -293,10 +292,10 @@ impl SqlWriter<'_> {
write!(self.sql, "c.id in (select cid from revlog where id").unwrap();
match op {
">" => write!(self.sql, " >= {}", target_cutoff_ms),
">=" => write!(self.sql, " >= {}", day_before_cutoff_ms),
"<" => write!(self.sql, " < {}", day_before_cutoff_ms),
"<=" => write!(self.sql, " < {}", target_cutoff_ms),
">" => write!(self.sql, " >= {target_cutoff_ms}"),
">=" => write!(self.sql, " >= {day_before_cutoff_ms}"),
"<" => write!(self.sql, " < {day_before_cutoff_ms}"),
"<=" => write!(self.sql, " < {target_cutoff_ms}"),
"=" => write!(
self.sql,
" between {} and {}",
@ -314,7 +313,7 @@ impl SqlWriter<'_> {
.unwrap();
match ease {
RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {})", u),
RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {u})"),
RatingKind::AnyAnswerButton => write!(self.sql, " and ease > 0)"),
RatingKind::ManualReschedule => write!(self.sql, " and ease = 0)"),
}
@ -356,9 +355,9 @@ impl SqlWriter<'_> {
pos = pos
)
.unwrap(),
PropertyKind::Interval(ivl) => write!(self.sql, "ivl {} {}", op, ivl).unwrap(),
PropertyKind::Reps(reps) => write!(self.sql, "reps {} {}", op, reps).unwrap(),
PropertyKind::Lapses(days) => write!(self.sql, "lapses {} {}", op, days).unwrap(),
PropertyKind::Interval(ivl) => write!(self.sql, "ivl {op} {ivl}").unwrap(),
PropertyKind::Reps(reps) => write!(self.sql, "reps {op} {reps}").unwrap(),
PropertyKind::Lapses(days) => write!(self.sql, "lapses {op} {days}").unwrap(),
PropertyKind::Ease(ease) => {
write!(self.sql, "factor {} {}", op, (ease * 1000.0) as u32).unwrap()
}
@ -474,7 +473,7 @@ impl SqlWriter<'_> {
};
// convert to a regex that includes child decks
self.args.push(format!("(?i)^{}($|\x1f)", native_deck));
self.args.push(format!("(?i)^{native_deck}($|\x1f)"));
let arg_idx = self.args.len();
self.sql.push_str(&format!(concat!(
"(c.did in (select id from decks where name regexp ?{n})",
@ -491,7 +490,7 @@ impl SqlWriter<'_> {
let ids = self.col.storage.deck_id_with_children(&parent)?;
let mut buf = String::new();
ids_to_string(&mut buf, &ids);
write!(self.sql, "c.did in {}", buf,).unwrap();
write!(self.sql, "c.did in {buf}",).unwrap();
} else {
self.sql.push_str("false")
}
@ -502,7 +501,7 @@ impl SqlWriter<'_> {
fn write_template(&mut self, template: &TemplateKind) {
match template {
TemplateKind::Ordinal(n) => {
write!(self.sql, "c.ord = {}", n).unwrap();
write!(self.sql, "c.ord = {n}").unwrap();
}
TemplateKind::Name(name) => {
if is_glob(name) {
@ -550,7 +549,7 @@ impl SqlWriter<'_> {
}
fn write_all_fields_regexp(&mut self, val: &str) {
self.args.push(format!("(?i){}", val));
self.args.push(format!("(?i){val}"));
write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap();
}
@ -566,7 +565,7 @@ impl SqlWriter<'_> {
return Ok(());
}
self.args.push(format!("(?i){}", val));
self.args.push(format!("(?i){val}"));
let arg_idx = self.args.len();
let all_notetype_clauses = field_indicies_by_notetype
@ -775,13 +774,13 @@ impl SqlWriter<'_> {
fn write_added(&mut self, days: u32) -> Result<()> {
let cutoff = self.previous_day_cutoff(days)?.as_millis();
write!(self.sql, "c.id > {}", cutoff).unwrap();
write!(self.sql, "c.id > {cutoff}").unwrap();
Ok(())
}
fn write_edited(&mut self, days: u32) -> Result<()> {
let cutoff = self.previous_day_cutoff(days)?;
write!(self.sql, "n.mod > {}", cutoff).unwrap();
write!(self.sql, "n.mod > {cutoff}").unwrap();
Ok(())
}
@ -813,7 +812,7 @@ impl SqlWriter<'_> {
} else {
std::borrow::Cow::Borrowed(word)
};
self.args.push(format!(r"(?i){}", word));
self.args.push(format!(r"(?i){word}"));
let arg_idx = self.args.len();
if let Some(field_indices_by_notetype) = self.included_fields_for_unqualified_regex()? {
let notetype_clause = |ctx: &UnqualifiedRegexSearchContext| -> String {

View file

@ -70,30 +70,30 @@ fn write_search_node(node: &SearchNode) -> String {
match node {
UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
SingleField { field, text, is_re } => write_single_field(field, text, *is_re),
AddedInDays(u) => format!("added:{}", u),
EditedInDays(u) => format!("edited:{}", u),
IntroducedInDays(u) => format!("introduced:{}", u),
AddedInDays(u) => format!("added:{u}"),
EditedInDays(u) => format!("edited:{u}"),
IntroducedInDays(u) => format!("introduced:{u}"),
CardTemplate(t) => write_template(t),
Deck(s) => maybe_quote(&format!("deck:{}", s)),
DeckIdsWithoutChildren(s) => format!("did:{}", s),
Deck(s) => maybe_quote(&format!("deck:{s}")),
DeckIdsWithoutChildren(s) => format!("did:{s}"),
// not exposed on the GUI end
DeckIdWithChildren(_) => "".to_string(),
NotetypeId(NotetypeIdType(i)) => format!("mid:{}", i),
Notetype(s) => maybe_quote(&format!("note:{}", s)),
NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"),
Notetype(s) => maybe_quote(&format!("note:{s}")),
Rated { days, ease } => write_rated(days, ease),
Tag { tag, is_re } => write_single_field("tag", tag, *is_re),
Duplicates { notetype_id, text } => write_dupe(notetype_id, text),
State(k) => write_state(k),
Flag(u) => format!("flag:{}", u),
NoteIds(s) => format!("nid:{}", s),
CardIds(s) => format!("cid:{}", s),
Flag(u) => format!("flag:{u}"),
NoteIds(s) => format!("nid:{s}"),
CardIds(s) => format!("cid:{s}"),
Property { operator, kind } => write_property(operator, kind),
WholeCollection => "deck:*".to_string(),
Regex(s) => maybe_quote(&format!("re:{}", s)),
NoCombining(s) => maybe_quote(&format!("nc:{}", s)),
WordBoundary(s) => maybe_quote(&format!("w:{}", s)),
CustomData(k) => maybe_quote(&format!("has-cd:{}", k)),
Preset(s) => maybe_quote(&format!("preset:{}", s)),
Regex(s) => maybe_quote(&format!("re:{s}")),
NoCombining(s) => maybe_quote(&format!("nc:{s}")),
WordBoundary(s) => maybe_quote(&format!("w:{s}")),
CustomData(k) => maybe_quote(&format!("has-cd:{k}")),
Preset(s) => maybe_quote(&format!("preset:{s}")),
}
}
@ -128,23 +128,23 @@ fn write_single_field(field: &str, text: &str, is_re: bool) -> String {
fn write_template(template: &TemplateKind) -> String {
match template {
TemplateKind::Ordinal(u) => format!("card:{}", u + 1),
TemplateKind::Name(s) => maybe_quote(&format!("card:{}", s)),
TemplateKind::Name(s) => maybe_quote(&format!("card:{s}")),
}
}
fn write_rated(days: &u32, ease: &RatingKind) -> String {
use RatingKind::*;
match ease {
AnswerButton(n) => format!("rated:{}:{}", days, n),
AnyAnswerButton => format!("rated:{}", days),
ManualReschedule => format!("resched:{}", days),
AnswerButton(n) => format!("rated:{days}:{n}"),
AnyAnswerButton => format!("rated:{days}"),
ManualReschedule => format!("resched:{days}"),
}
}
/// Escape double quotes and backslashes: \"
fn write_dupe(notetype_id: &NotetypeId, text: &str) -> String {
let esc = text.replace('\\', r"\\");
maybe_quote(&format!("dupe:{},{}", notetype_id, esc))
maybe_quote(&format!("dupe:{notetype_id},{esc}"))
}
fn write_state(kind: &StateKind) -> String {
@ -167,19 +167,19 @@ fn write_state(kind: &StateKind) -> String {
fn write_property(operator: &str, kind: &PropertyKind) -> String {
use PropertyKind::*;
match kind {
Due(i) => format!("prop:due{}{}", operator, i),
Interval(u) => format!("prop:ivl{}{}", operator, u),
Reps(u) => format!("prop:reps{}{}", operator, u),
Lapses(u) => format!("prop:lapses{}{}", operator, u),
Ease(f) => format!("prop:ease{}{}", operator, f),
Position(u) => format!("prop:pos{}{}", operator, u),
Stability(u) => format!("prop:s{}{}", operator, u),
Difficulty(u) => format!("prop:d{}{}", operator, u),
Retrievability(u) => format!("prop:r{}{}", operator, u),
Due(i) => format!("prop:due{operator}{i}"),
Interval(u) => format!("prop:ivl{operator}{u}"),
Reps(u) => format!("prop:reps{operator}{u}"),
Lapses(u) => format!("prop:lapses{operator}{u}"),
Ease(f) => format!("prop:ease{operator}{f}"),
Position(u) => format!("prop:pos{operator}{u}"),
Stability(u) => format!("prop:s{operator}{u}"),
Difficulty(u) => format!("prop:d{operator}{u}"),
Retrievability(u) => format!("prop:r{operator}{u}"),
Rated(u, ease) => match ease {
RatingKind::AnswerButton(val) => format!("prop:rated{}{}:{}", operator, u, val),
RatingKind::AnyAnswerButton => format!("prop:rated{}{}", operator, u),
RatingKind::ManualReschedule => format!("prop:resched{}{}", operator, u),
RatingKind::AnswerButton(val) => format!("prop:rated{operator}{u}:{val}"),
RatingKind::AnyAnswerButton => format!("prop:rated{operator}{u}"),
RatingKind::ManualReschedule => format!("prop:resched{operator}{u}"),
},
CustomDataNumber { key, value } => format!("prop:cdn:{key}{operator}{value}"),
CustomDataString { key, value } => {

View file

@ -829,8 +829,7 @@ impl fmt::Display for ReviewOrderSubclause {
ReviewOrderSubclause::RetrievabilitySm2 { today, order } => {
temp_string = format!(
// - (elapsed days+0.001)/(scheduled interval)
"-(1 + cast({today}-due+0.001 as real)/ivl) {order}",
today = today
"-(1 + cast({today}-due+0.001 as real)/ivl) {order}"
);
&temp_string
}
@ -844,7 +843,7 @@ impl fmt::Display for ReviewOrderSubclause {
ReviewOrderSubclause::Added => "nid asc, ord asc",
ReviewOrderSubclause::ReverseAdded => "nid desc, ord asc",
};
write!(f, "{}", clause)
write!(f, "{clause}")
}
}

View file

@ -33,7 +33,7 @@ fn row_to_deck(row: &Row) -> Result<Deck> {
common,
kind: kind.kind.ok_or_else(|| {
AnkiError::db_error(
format!("invalid deck kind: {}", id),
format!("invalid deck kind: {id}"),
DbErrorKind::MissingEntity,
)
})?,
@ -347,8 +347,8 @@ impl SqliteStorage {
))?;
let top = current.name.as_native_str();
let prefix_start = &format!("{}\x1f", top);
let prefix_end = &format!("{}\x20", top);
let prefix_start = &format!("{top}\x1f");
let prefix_end = &format!("{top}\x20");
self.db
.prepare_cached(include_str!("update_active.sql"))?
@ -379,7 +379,7 @@ impl SqliteStorage {
let decks = self
.get_schema11_decks()
.map_err(|e| AnkiError::JsonError {
info: format!("decoding decks: {}", e),
info: format!("decoding decks: {e}"),
})?;
let mut names = HashSet::new();
for (_id, deck) in decks {

View file

@ -197,7 +197,7 @@ impl SqliteStorage {
serde_json::from_value(conf)
})
.map_err(|e| AnkiError::JsonError {
info: format!("decoding deck config: {}", e),
info: format!("decoding deck config: {e}"),
})
})?;
for (id, mut conf) in conf.into_iter() {

View file

@ -52,7 +52,7 @@ where
{
let mut trailing_sep = false;
for id in ids {
write!(buf, "{},", id).unwrap();
write!(buf, "{id},").unwrap();
trailing_sep = true;
}
if trailing_sep {

View file

@ -345,7 +345,7 @@ impl SqliteStorage {
let nts = self
.get_schema11_notetypes()
.map_err(|e| AnkiError::JsonError {
info: format!("decoding models: {:?}", e),
info: format!("decoding models: {e:?}"),
})?;
let mut names = HashSet::new();
for (mut ntid, nt) in nts {

View file

@ -587,7 +587,7 @@ impl SqliteStorage {
}) {
Ok(corrupt) => corrupt,
Err(e) => {
println!("error: {:?}", e);
println!("error: {e:?}");
true
}
}

View file

@ -54,7 +54,7 @@ impl SqliteStorage {
if let Some(new_usn) = server_usn_if_client {
let mut stmt = self
.db
.prepare_cached(&format!("update {} set usn=? where id=?", table))?;
.prepare_cached(&format!("update {table} set usn=? where id=?"))?;
for id in ids {
stmt.execute(params![new_usn, id])?;
}

View file

@ -11,7 +11,7 @@ impl SqliteStorage {
fn table_has_usn(&self, table: &str) -> Result<bool> {
Ok(self
.db
.prepare(&format!("select null from {} where usn=-1", table))?
.prepare(&format!("select null from {table} where usn=-1"))?
.query([])?
.next()?
.is_some())
@ -19,7 +19,7 @@ impl SqliteStorage {
fn table_count(&self, table: &str) -> Result<u32> {
self.db
.query_row(&format!("select count() from {}", table), [], |r| r.get(0))
.query_row(&format!("select count() from {table}"), [], |r| r.get(0))
.map_err(Into::into)
}
@ -36,7 +36,7 @@ impl SqliteStorage {
] {
if self.table_has_usn(table)? {
return Err(AnkiError::sync_error(
format!("table had usn=-1: {}", table),
format!("table had usn=-1: {table}"),
SyncErrorKind::Other,
));
}

View file

@ -100,7 +100,7 @@ where
_lock = LOCK.lock().await;
endpoint
} else {
format!("http://{}/", addr)
format!("http://{addr}/")
};
let endpoint = Url::try_from(endpoint.as_str()).unwrap();
let auth = SyncAuth {
@ -734,7 +734,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> {
for table in &["cards", "notes", "decks"] {
assert_eq!(
col1.storage
.db_scalar::<u8>(&format!("select count() from {}", table))?,
.db_scalar::<u8>(&format!("select count() from {table}"))?,
2
);
}
@ -754,7 +754,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> {
for table in &["cards", "notes", "decks"] {
assert_eq!(
col2.storage
.db_scalar::<u8>(&format!("select count() from {}", table))?,
.db_scalar::<u8>(&format!("select count() from {table}"))?,
1
);
}

View file

@ -285,7 +285,7 @@ fn row_to_name_and_checksum(row: &Row) -> error::Result<(String, Sha1Hash)> {
fn trace(event: rusqlite::trace::TraceEvent) {
if let rusqlite::trace::TraceEvent::Stmt(_, sql) = event {
println!("sql: {}", sql);
println!("sql: {sql}");
}
}

View file

@ -35,7 +35,7 @@ impl Collection {
};
if !match_case {
search = format!("(?i){}", search).into();
search = format!("(?i){search}").into();
}
self.transact(Op::UpdateTag, |col| {

View file

@ -33,7 +33,7 @@ impl TagMatcher {
(?:^|\ )
# 1: the tag prefix
(
{}
{tags}
)
(?:
# 2: an optional child separator
@ -41,8 +41,7 @@ impl TagMatcher {
# or a space/end of string the end of the string
|\ |$
)
"#,
tags
"#
))?;
Ok(Self {
@ -61,7 +60,7 @@ impl TagMatcher {
let out = self.regex.replace(tag, |caps: &Captures| {
// if we captured the child separator, add it to the replacement
if caps.get(2).is_some() {
Cow::Owned(format!("{}::", replacement))
Cow::Owned(format!("{replacement}::"))
} else {
Cow::Borrowed(replacement)
}
@ -92,7 +91,7 @@ impl TagMatcher {
let replacement = replacer(caps.get(1).unwrap().as_str());
// if we captured the child separator, add it to the replacement
if caps.get(2).is_some() {
format!("{}::", replacement)
format!("{replacement}::")
} else {
replacement
}

View file

@ -109,7 +109,7 @@ fn reparented_name(existing_name: &str, new_parent: Option<&str>) -> Option<Stri
None
} else {
// foo::bar onto baz -> baz::bar
let new_name = format!("{}::{}", new_parent, existing_base);
let new_name = format!("{new_parent}::{existing_base}");
if new_name != existing_name {
Some(new_name)
} else {

View file

@ -265,10 +265,8 @@ fn template_error_to_anki_error(
};
let details = htmlescape::encode_minimal(&localized_template_error(tr, err));
let more_info = tr.card_template_rendering_more_info();
let source = format!(
"{}<br>{}<br><a href='{}'>{}</a>",
header, details, TEMPLATE_ERROR_LINK, more_info
);
let source =
format!("{header}<br>{details}<br><a href='{TEMPLATE_ERROR_LINK}'>{more_info}</a>");
AnkiError::TemplateError { info: source }
}
@ -279,32 +277,29 @@ fn localized_template_error(tr: &I18n, err: TemplateError) -> String {
.card_template_rendering_no_closing_brackets("}}", tag)
.into(),
TemplateError::ConditionalNotClosed(tag) => tr
.card_template_rendering_conditional_not_closed(format!("{{{{/{}}}}}", tag))
.card_template_rendering_conditional_not_closed(format!("{{{{/{tag}}}}}"))
.into(),
TemplateError::ConditionalNotOpen {
closed,
currently_open,
} => if let Some(open) = currently_open {
tr.card_template_rendering_wrong_conditional_closed(
format!("{{{{/{}}}}}", closed),
format!("{{{{/{}}}}}", open),
format!("{{{{/{closed}}}}}"),
format!("{{{{/{open}}}}}"),
)
} else {
tr.card_template_rendering_conditional_not_open(
format!("{{{{/{}}}}}", closed),
format!("{{{{#{}}}}}", closed),
format!("{{{{^{}}}}}", closed),
format!("{{{{/{closed}}}}}"),
format!("{{{{#{closed}}}}}"),
format!("{{{{^{closed}}}}}"),
)
}
.into(),
TemplateError::FieldNotFound { field, filters } => tr
.card_template_rendering_no_such_field(format!("{{{{{}{}}}}}", filters, field), field)
.card_template_rendering_no_such_field(format!("{{{{{filters}{field}}}}}"), field)
.into(),
TemplateError::NoSuchConditional(condition) => tr
.card_template_rendering_no_such_field(
format!("{{{{{}}}}}", condition),
&condition[1..],
)
.card_template_rendering_no_such_field(format!("{{{{{condition}}}}}"), &condition[1..])
.into(),
}
}
@ -523,10 +518,7 @@ impl RenderContext<'_> {
Ok(false ^ negated)
} else {
let prefix = if negated { "^" } else { "#" };
Err(TemplateError::NoSuchConditional(format!(
"{}{}",
prefix, key
)))
Err(TemplateError::NoSuchConditional(format!("{prefix}{key}")))
}
}
}
@ -858,14 +850,14 @@ fn nodes_to_string(buf: &mut String, nodes: &[ParsedNode]) {
.unwrap();
}
ParsedNode::Conditional { key, children } => {
write!(buf, "{{{{#{}}}}}", key).unwrap();
write!(buf, "{{{{#{key}}}}}").unwrap();
nodes_to_string(buf, children);
write!(buf, "{{{{/{}}}}}", key).unwrap();
write!(buf, "{{{{/{key}}}}}").unwrap();
}
ParsedNode::NegatedConditional { key, children } => {
write!(buf, "{{{{^{}}}}}", key).unwrap();
write!(buf, "{{{{^{key}}}}}").unwrap();
nodes_to_string(buf, children);
write!(buf, "{{{{/{}}}}}", key).unwrap();
write!(buf, "{{{{/{key}}}}}").unwrap();
}
}
}

View file

@ -165,15 +165,15 @@ fn furigana_filter(text: &str) -> Cow<str> {
/// convert to [[type:...]] for the gui code to process
fn type_filter<'a>(field_name: &str) -> Cow<'a, str> {
format!("[[type:{}]]", field_name).into()
format!("[[type:{field_name}]]").into()
}
fn type_cloze_filter<'a>(field_name: &str) -> Cow<'a, str> {
format!("[[type:cloze:{}]]", field_name).into()
format!("[[type:cloze:{field_name}]]").into()
}
fn type_nc_filter<'a>(field_name: &str) -> Cow<'a, str> {
format!("[[type:nc:{}]]", field_name).into()
format!("[[type:nc:{field_name}]]").into()
}
fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> {
@ -191,18 +191,17 @@ fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> {
r##"
<a class=hint href="#"
onclick="this.style.display='none';
document.getElementById('hint{}').style.display='block';
document.getElementById('hint{id}').style.display='block';
return false;" draggable=false>
{}</a>
<div id="hint{}" class=hint style="display: none">{}</div>
"##,
id, field_name, id, text
{field_name}</a>
<div id="hint{id}" class=hint style="display: none">{text}</div>
"##
)
.into()
}
fn tts_filter(options: &str, text: &str) -> String {
format!("[anki:tts lang={}]{}[/anki:tts]", options, text)
format!("[anki:tts lang={options}]{text}[/anki:tts]")
}
// Tests

View file

@ -484,7 +484,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> {
match s {
r"\\" | r"\*" => s.to_string(),
r"\_" => "_".to_string(),
"*" => format!("{}*", wildcard),
"*" => format!("{wildcard}*"),
"_" => wildcard.to_string(),
s => regex::escape(s),
}

View file

@ -1,3 +1,3 @@
[toolchain]
# older versions may fail to compile; newer versions may fail the clippy tests
channel = "1.87.0"
channel = "1.88.0"

View file

@ -108,7 +108,7 @@ impl LintContext {
LazyCell::force(&self.unstaged_changes);
fix_copyright(path)?;
} else {
println!("missing standard copyright header: {:?}", path);
println!("missing standard copyright header: {path:?}");
self.found_problems = true;
}
}
@ -241,7 +241,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
.write(true)
.open(path)
.with_context(|| format!("opening {path}"))?;
write!(file, "{}{}", header, data).with_context(|| format!("writing {path}"))?;
write!(file, "{header}{data}").with_context(|| format!("writing {path}"))?;
Ok(())
}

View file

@ -34,5 +34,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<style lang="scss">
.collapse-label {
cursor: pointer;
user-select: none;
}
</style>

View file

@ -6,9 +6,3 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<span class="label-name">
<slot />
</span>
<style lang="scss">
.label-name {
user-select: none;
}
</style>