diff --git a/.cargo/config.toml b/.cargo/config.toml index 67f0dea34..3fbb3be1b 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -5,7 +5,8 @@ DESCRIPTORS_BIN = { value = "out/rslib/proto/descriptors.bin", relative = true } # build script will append .exe if necessary PROTOC = { value = "out/extracted/protoc/bin/protoc", relative = true } PYO3_NO_PYTHON = "1" -MACOSX_DEPLOYMENT_TARGET = "10.13.4" +MACOSX_DEPLOYMENT_TARGET = "11" +PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows [term] color = "always" diff --git a/.deny.toml b/.deny.toml index 8a379fa55..7cdf0cf99 100644 --- a/.deny.toml +++ b/.deny.toml @@ -5,9 +5,6 @@ db-path = "~/.cargo/advisory-db" db-urls = ["https://github.com/rustsec/advisory-db"] ignore = [ - # pyoxidizer is stuck on an old ring version - "RUSTSEC-2025-0009", - "RUSTSEC-2025-0010", # burn depends on an unmaintained package 'paste' "RUSTSEC-2024-0436", ] @@ -17,12 +14,11 @@ allow = [ "MIT", "Apache-2.0", "Apache-2.0 WITH LLVM-exception", + "CDLA-Permissive-2.0", "ISC", "MPL-2.0", - "Unicode-DFS-2016", "BSD-2-Clause", "BSD-3-Clause", - "OpenSSL", "CC0-1.0", "Unlicense", "Zlib", diff --git a/.dprint.json b/.dprint.json index 8e9f19b40..4230cdcd6 100644 --- a/.dprint.json +++ b/.dprint.json @@ -20,7 +20,6 @@ "ftl/usage", "licenses.json", ".dmypy.json", - "qt/bundle/PyOxidizer", "target", ".mypy_cache", "extra", diff --git a/.gitignore b/.gitignore index 91a949329..ccac21aa2 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,5 @@ node_modules yarn-error.log ts/.svelte-kit .yarn +.claude/settings.local.json +.claude/user.md diff --git a/.gitmodules b/.gitmodules index 90cec9ca9..50b5aa9f3 100644 --- a/.gitmodules +++ b/.gitmodules @@ -6,9 +6,3 @@ path = ftl/qt-repo url = https://github.com/ankitects/anki-desktop-ftl.git shallow = true -[submodule "qt/bundle/PyOxidizer"] - path = qt/bundle/PyOxidizer - url = https://github.com/ankitects/PyOxidizer.git - shallow = true - update = none - diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index 109f5c21e..000000000 --- a/.isort.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[settings] -py_version=39 -known_first_party=anki,aqt,tests -profile=black -extend_skip=qt/bundle diff --git a/.mypy.ini b/.mypy.ini index 648c6a6ea..9fb8d3689 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -18,7 +18,7 @@ mypy_path = ftl, pylib/tools, python -exclude = (qt/bundle/PyOxidizer|pylib/anki/_vendor) +exclude = (pylib/anki/_vendor) [mypy-anki.*] disallow_untyped_defs = True @@ -165,3 +165,5 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-pip_system_certs.*] ignore_missing_imports = True +[mypy-anki_audio] +ignore_missing_imports = True diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 2413cc6c4..000000000 --- a/.pylintrc +++ /dev/null @@ -1,48 +0,0 @@ -[MASTER] -ignore-patterns=.*_pb2.* -persistent = no -extension-pkg-whitelist=orjson,PyQt6 -init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])" - -[REPORTS] -output-format=colorized - -[MESSAGES CONTROL] -disable= - R, - line-too-long, - too-many-lines, - missing-function-docstring, - missing-module-docstring, - missing-class-docstring, - import-outside-toplevel, - wrong-import-position, - wrong-import-order, - fixme, - unused-wildcard-import, - attribute-defined-outside-init, - redefined-builtin, - wildcard-import, - broad-except, - bare-except, - unused-argument, - unused-variable, - redefined-outer-name, - global-statement, - protected-access, - arguments-differ, - arguments-renamed, - consider-using-f-string, - 
invalid-name, - broad-exception-raised - -[BASIC] -good-names = - id, - tr, - db, - ok, - ip, - -[IMPORTS] -ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2 diff --git a/.python-version b/.python-version new file mode 100644 index 000000000..86f8c02eb --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13.5 diff --git a/.ruff.toml b/.ruff.toml index 498ecbdac..4fa1ffea6 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -1,2 +1,91 @@ -target-version = "py39" -extend-exclude = ["qt/bundle"] +lint.select = [ + "E", # pycodestyle errors + "F", # Pyflakes errors + "PL", # Pylint rules + "I", # Isort rules + "ARG", + # "UP", # pyupgrade + # "B", # flake8-bugbear + # "SIM", # flake8-simplify +] + +extend-exclude = ["*_pb2.py", "*_pb2.pyi"] + +lint.ignore = [ + # Docstring rules (missing-*-docstring in pylint) + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D103", # Missing docstring in public function + + # Import rules (wrong-import-* in pylint) + "E402", # Module level import not at top of file + "E501", # Line too long + + # pycodestyle rules + "E741", # ambiguous-variable-name + + # Comment rules (fixme in pylint) + "FIX002", # Line contains TODO + + # Pyflakes rules + "F402", # import-shadowed-by-loop-var + "F403", # undefined-local-with-import-star + "F405", # undefined-local-with-import-star-usage + + # Naming rules (invalid-name in pylint) + "N801", # Class name should use CapWords convention + "N802", # Function name should be lowercase + "N803", # Argument name should be lowercase + "N806", # Variable in function should be lowercase + "N811", # Constant imported as non-constant + "N812", # Lowercase imported as non-lowercase + "N813", # Camelcase imported as lowercase + "N814", # Camelcase imported as constant + "N815", # Variable in class scope should not be mixedCase + "N816", # Variable in global scope should not be mixedCase + "N817", # CamelCase imported as acronym + "N818", # Error suffix in exception names + + # Pylint rules + "PLW0603", # global-statement + "PLW2901", # redefined-loop-name + "PLC0415", # import-outside-top-level + "PLR2004", # magic-value-comparison + + # Exception handling (broad-except, bare-except in pylint) + "BLE001", # Do not catch blind exception + + # Argument rules (unused-argument in pylint) + "ARG001", # Unused function argument + "ARG002", # Unused method argument + "ARG005", # Unused lambda argument + + # Access rules (protected-access in pylint) + "SLF001", # Private member accessed + + # String formatting (consider-using-f-string in pylint) + "UP032", # Use f-string instead of format call + + # Exception rules (broad-exception-raised in pylint) + "TRY301", # Abstract raise to an inner function + + # Builtin shadowing (redefined-builtin in pylint) + "A001", # Variable shadows a Python builtin + "A002", # Argument shadows a Python builtin + "A003", # Class attribute shadows a Python builtin +] + +[lint.per-file-ignores] +"**/anki/*_pb2.py" = ["ALL"] + +[lint.pep8-naming] +ignore-names = ["id", "tr", "db", "ok", "ip"] + +[lint.pylint] +max-args = 12 +max-returns = 10 +max-branches = 35 +max-statements = 125 + +[lint.isort] +known-first-party = ["anki", "aqt", "tests"] diff --git a/.version b/.version index 9bab2a4b4..6ee14a7b9 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -25.06 +25.07.1 diff --git a/.vscode.dist/extensions.json b/.vscode.dist/extensions.json index bb449cc57..eb13662d6 100644 --- a/.vscode.dist/extensions.json 
+++ b/.vscode.dist/extensions.json @@ -2,7 +2,7 @@ "recommendations": [ "dprint.dprint", "ms-python.python", - "ms-python.black-formatter", + "charliermarsh.ruff", "rust-lang.rust-analyzer", "svelte.svelte-vscode", "zxh404.vscode-proto3", diff --git a/.vscode.dist/settings.json b/.vscode.dist/settings.json index ffac17cae..0da294c38 100644 --- a/.vscode.dist/settings.json +++ b/.vscode.dist/settings.json @@ -18,7 +18,7 @@ "out/qt", "qt" ], - "python.formatting.provider": "black", + "python.formatting.provider": "charliermarsh.ruff", "python.linting.mypyEnabled": false, "python.analysis.diagnosticSeverityOverrides": { "reportMissingModuleSource": "none" @@ -31,11 +31,13 @@ "rust-analyzer.rustfmt.extraArgs": ["+nightly"], "search.exclude": { "**/node_modules": true, - ".bazel/**": true, - "qt/bundle/PyOxidizer": true + ".bazel/**": true }, "rust-analyzer.cargo.buildScripts.enable": true, "python.analysis.typeCheckingMode": "off", + "python.analysis.exclude": [ + "out/launcher/**" + ], "terminal.integrated.env.windows": { "PATH": "c:\\msys64\\usr\\bin;${env:Path}" } diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..fa58b805b --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,86 @@ +# Claude Code Configuration + +## Project Overview + +Anki is a spaced repetition flashcard program with a multi-layered architecture. Main components: + +- Web frontend: Svelte/TypeScript in ts/ +- PyQt GUI, which embeds the web components in aqt/ +- Python library which wraps our rust Layer (pylib/, with Rust module in pylib/rsbridge) +- Core Rust layer in rslib/ +- Protobuf definitions in proto/ that are used by the different layers to + talk to each other. + +## Building/checking + +./check (check.bat) will format the code and run the main build & checks. +Please do this as a final step before marking a task as completed. + +## Quick iteration + +During development, you can build/check subsections of our code: + +- Rust: 'cargo check' +- Python: './tools/dmypy', and if wheel-related, './ninja wheels' +- TypeScript/Svelte: './ninja check:svelte' + +Be mindful that some changes (such as modifications to .proto files) may +need a full build with './check' first. + +## Build tooling + +'./check' and './ninja' invoke our build system, which is implemented in build/. It takes care of downloading required deps and invoking our build +steps. + +## Translations + +ftl/ contains our Fluent translation files. We have scripts in rslib/i18n +to auto-generate an API for Rust, TypeScript and Python so that our code can +access the translations in a type-safe manner. Changes should be made to +ftl/core or ftl/qt. Except for features specific to our Qt interface, prefer +the core module. When adding new strings, confirm the appropriate ftl file +first, and try to match the existing style. + +## Protobuf and IPC + +Our build scripts use the .proto files to define our Rust library's +non-Rust API. pylib/rsbridge exposes that API, and _backend.py exposes +snake_case methods for each protobuf RPC that call into the API. +Similar tooling creates a @generated/backend TypeScript module for +communicating with the Rust backend (which happens over POST requests). + +## Fixing errors + +When dealing with build errors or failing tests, invoke 'check' or one +of the quick iteration commands regularly. This helps verify your changes +are correct. To locate other instances of a problem, run the check again - +don't attempt to grep the codebase. + +## Ignores + +The files in out/ are auto-generated. 
Mostly you should ignore that folder, +though you may sometimes find it useful to view out/{pylib/anki,qt/_aqt,ts/lib/generated} when dealing with cross-language communication or our other generated sourcecode. + +## Launcher/installer + +The code for our launcher is in qt/launcher, with separate code for each +platform. + +## Rust dependencies + +Prefer adding to the root workspace, and using dep.workspace = true in the individual Rust project. + +## Rust utilities + +rslib/{process,io} contain some helpers for file and process operations, +which provide better error messages/context and some ergonomics. Use them +when possible. + +## Rust error handling + +in rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping +in build scripts/tests is fine. + +## Individual preferences + +See @.claude/user.md diff --git a/CONTRIBUTORS b/CONTRIBUTORS index d334540fb..327e37f27 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -63,6 +63,7 @@ Jakub Kaczmarzyk Akshara Balachandra lukkea David Allison +David Allison <62114487+david-allison@users.noreply.github.com> Tsung-Han Yu Piotr Kubowicz RumovZ @@ -232,6 +233,7 @@ Spiritual Father Emmanuel Ferdman Sunong2008 Marvin Kopf +Kevin Nakamura ******************** The text of the 3 clause BSD license follows: diff --git a/Cargo.lock b/Cargo.lock index 28073f3ac..26006790b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,20 +13,20 @@ dependencies = [ [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -94,6 +94,7 @@ dependencies = [ "axum", "axum-client-ip", "axum-extra", + "bitflags 2.9.1", "blake3", "bytes", "chrono", @@ -103,7 +104,7 @@ dependencies = [ "csv", "data-encoding", "difflib", - "dirs 5.0.1", + "dirs 6.0.0", "envy", "flate2", "fluent", @@ -116,8 +117,8 @@ dependencies = [ "hyper 1.6.0", "id_tree", "inflections", - "itertools 0.13.0", - "nom", + "itertools 0.14.0", + "nom 8.0.0", "num_cpus", "num_enum", "once_cell", @@ -128,10 +129,10 @@ dependencies = [ "prettyplease", "prost", "prost-reflect", - "pulldown-cmark 0.9.6", - "rand 0.8.5", + "pulldown-cmark 0.13.0", + "rand 0.9.1", "regex", - "reqwest 0.12.15", + "reqwest 0.12.20", "rusqlite", "rustls-pemfile 2.2.0", "scopeguard", @@ -142,8 +143,8 @@ dependencies = [ "serde_tuple", "sha1", "snafu", - "strum 0.26.3", - "syn 2.0.101", + "strum 0.27.1", + "syn 2.0.103", "tempfile", "tokio", "tokio-util", @@ -154,9 +155,9 @@ dependencies = [ "unic-ucd-category", "unicase", "unicode-normalization", - "windows 0.56.0", + "windows 0.61.3", "wiremock", - "zip 0.6.6", + "zip 4.1.0", "zstd", ] @@ -178,7 +179,7 @@ dependencies = [ "fluent-syntax", "inflections", "intl-memoizer", - "itertools 0.13.0", + "itertools 0.14.0", "num-format", "phf 0.11.3", "serde", @@ -199,7 +200,7 @@ dependencies = [ name = "anki_process" version = "0.0.0" dependencies = [ - "itertools 0.13.0", + "itertools 0.14.0", 
"snafu", ] @@ -211,14 +212,14 @@ dependencies = [ "anki_proto_gen", "anyhow", "inflections", - "itertools 0.13.0", + "itertools 0.14.0", "prost", "prost-build", "prost-reflect", "prost-types", "serde", "snafu", - "strum 0.26.3", + "strum 0.27.1", ] [[package]] @@ -229,7 +230,7 @@ dependencies = [ "anyhow", "camino", "inflections", - "itertools 0.13.0", + "itertools 0.14.0", "prost-reflect", "prost-types", "regex", @@ -238,9 +239,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" dependencies = [ "anstyle", "anstyle-parse", @@ -253,36 +254,36 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882" dependencies = [ "anstyle", - "once_cell", + "once_cell_polyfill", "windows-sys 0.59.0", ] @@ -292,18 +293,6 @@ version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" -[[package]] -name = "apple-bundles" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "716b8a7bacf7325eb3e7a1a7f5ead4da91e1e16d9b56a25edea0e1e4ba21fd8e" -dependencies = [ - "anyhow", - "plist", - "simple-file-manifest", - "walkdir", -] - [[package]] name = "arbitrary" version = "1.4.1" @@ -358,9 +347,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.23" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b37fc50485c4f3f736a4fb14199f6d5f5ba008d7f28fe710306c92780f004c07" +checksum = "d615619615a650c571269c00dca41db04b9210037fa76ed8239f70404ab56985" dependencies = [ "futures-core", "memchr", @@ -389,7 +378,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -400,7 +389,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -423,14 +412,14 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" 
[[package]] name = "axum" -version = "0.7.9" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" dependencies = [ - "async-trait", "axum-core", "axum-macros", "bytes", + "form_urlencoded", "futures-util", "http 1.3.1", "http-body 1.0.1", @@ -459,24 +448,23 @@ dependencies = [ [[package]] name = "axum-client-ip" -version = "0.6.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eefda7e2b27e1bda4d6fa8a06b50803b8793769045918bc37ad062d48a6efac" +checksum = "3f08a543641554404b42acd0d2494df12ca2be034d7b8ee4dbbf7446f940a2ef" dependencies = [ "axum", - "forwarded-header-value", + "client-ip", "serde", ] [[package]] name = "axum-core" -version = "0.4.5" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" dependencies = [ - "async-trait", "bytes", - "futures-util", + "futures-core", "http 1.3.1", "http-body 1.0.1", "http-body-util", @@ -491,22 +479,21 @@ dependencies = [ [[package]] name = "axum-extra" -version = "0.9.6" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c794b30c904f0a1c2fb7740f7df7f7972dfaa14ef6f57cb6178dc63e5dca2f04" +checksum = "45bf463831f5131b7d3c756525b305d40f1185b688565648a92e1392ca35713d" dependencies = [ "axum", "axum-core", "bytes", - "fastrand", "futures-util", - "headers 0.4.0", + "headers 0.4.1", "http 1.3.1", "http-body 1.0.1", "http-body-util", "mime", - "multer", "pin-project-lite", + "rustversion", "serde", "tower", "tower-layer", @@ -515,20 +502,20 @@ dependencies = [ [[package]] name = "axum-macros" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", "cfg-if", @@ -539,12 +526,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -559,9 +540,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.7.3" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bincode" @@ -596,9 +577,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" dependencies = [ "serde", ] @@ -631,15 +612,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "block-padding" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" -dependencies = [ - "generic-array", -] - [[package]] name = "bstr" version = "1.12.0" @@ -653,9 +625,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee" [[package]] name = "burn" @@ -683,7 +655,7 @@ dependencies = [ "burn-common", "burn-tensor", "derive-new 0.7.0", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "log", "num-traits", "portable-atomic", @@ -729,7 +701,7 @@ dependencies = [ "derive-new 0.7.0", "flate2", "half", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "log", "num-traits", "portable-atomic-util", @@ -756,7 +728,7 @@ dependencies = [ "derive-new 0.7.0", "futures-lite", "half", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "log", "num-traits", "rand 0.9.1", @@ -808,7 +780,7 @@ dependencies = [ "derive-new 0.7.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -818,7 +790,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d63629f2c8b82ee52dbb9c18becded5117c2faf57365dc271a55c16d139cd91a" dependencies = [ "burn-tensor", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "portable-atomic-util", "serde", ] @@ -873,7 +845,7 @@ dependencies = [ "burn-common", "burn-ir", "burn-tensor", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "log", "spin 0.10.0", ] @@ -890,10 +862,10 @@ dependencies = [ "cubecl", "derive-new 0.7.0", "half", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "num-traits", "rand 0.9.1", - "rand_distr 0.5.1", + "rand_distr", "serde", "serde_bytes", ] @@ -932,9 +904,9 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.22.0" +version = "1.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540" +checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" dependencies = [ "bytemuck_derive", ] @@ -947,7 +919,7 @@ checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -970,9 +942,9 @@ checksum = "2e93abca9e28e0a1b9877922aacb20576e05d4679ffa78c3d6dc22a26a216659" [[package]] name = "camino" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" [[package]] name = "candle-core" @@ -987,12 +959,12 @@ dependencies = [ "num-traits", "num_cpus", "rand 0.9.1", - "rand_distr 0.5.1", + "rand_distr", "rayon", "safetensors", "thiserror 1.0.69", "ug", - "yoke", + "yoke 0.7.5", "zip 1.1.4", ] @@ -1002,20 +974,11 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "cbc" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" -dependencies = [ - "cipher", -] - [[package]] name = "cc" -version = "1.2.20" +version = "1.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04da6a0d40b948dfc4fa8f5bbf402b0fc1a64a28dbf7d12ffd683550f2c1b63a" +checksum = "d487aa071b5f64da6f19a3e848e3578944b726ee5a4854b82172f02aa876bfdc" dependencies = [ "jobserver", "libc", @@ -1024,9 +987,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "cfg_aliases" @@ -1036,9 +999,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.40" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1073,21 +1036,11 @@ dependencies = [ "half", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - [[package]] name = "clap" -version = "4.5.37" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" +checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" dependencies = [ "clap_builder", "clap_derive", @@ -1095,9 +1048,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.37" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" +checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" dependencies = [ "anstream", "anstyle", @@ -1108,30 +1061,39 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.47" +version = "4.5.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f5378ea264ad4f82bbc826628b5aad714a75abf6ece087e923010eb937fb6" +checksum = "aad5b1b4de04fead402672b48897030eec1f3bfe1550776322f59f6d6e6a5677" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "client-ip" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "31211fc26899744f5b22521fdc971e5f3875991d8880537537470685a0e9552d" +dependencies = [ + "http 1.3.1", +] [[package]] name = "coarsetime" @@ -1173,14 +1135,14 @@ checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" dependencies = [ "serde", "termcolor", - "unicode-width 0.2.0", + "unicode-width 0.2.1", ] [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "colored" @@ -1205,7 +1167,7 @@ name = "configure" version = "0.0.0" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.14.0", "ninja_gen", ] @@ -1217,9 +1179,9 @@ checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "convert_case" -version = "0.6.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" dependencies = [ "unicode-segmentation", ] @@ -1236,9 +1198,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", @@ -1281,25 +1243,22 @@ dependencies = [ [[package]] name = "criterion" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679" dependencies = [ "anes", "cast", "ciborium", "clap", "criterion-plot", - "is-terminal", - "itertools 0.10.5", + "itertools 0.13.0", "num-traits", - "once_cell", "oorandom", "plotters", "rayon", "regex", "serde", - "serde_derive", "serde_json", "tinytemplate", "walkdir", @@ -1385,7 +1344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1456,7 +1415,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b03bf4211cdbd68bb0fb8291e0ed825c13da0d1ac01b7c02dce3cee44a6138be" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "bytemuck", "cubecl-common", "cubecl-ir", @@ -1527,11 +1486,12 @@ dependencies = [ [[package]] name = "cubecl-hip-sys" -version = "6.4.0" +version = "6.4.4348200" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7557762176858fa0357504025f09ae6e979c3547776ff8b6a1025ef0702450" +checksum = "283fa7401056c53fb27e18f5d1806246bb5f937c4ecbd2453896f7a9ec495c73" dependencies = [ "libc", + "regex", ] [[package]] @@ -1582,7 +1542,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1594,7 +1554,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1696,7 +1656,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.101", + "syn 
2.0.103", ] [[package]] @@ -1707,7 +1667,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1716,17 +1676,6 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" -[[package]] -name = "dbus" -version = "0.9.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bb21987b9fb1613058ba3843121dd18b163b254d8a6e797e144cbac14d96d1b" -dependencies = [ - "libc", - "libdbus-sys", - "winapi", -] - [[package]] name = "deadpool" version = "0.10.0" @@ -1762,7 +1711,7 @@ checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1773,7 +1722,7 @@ checksum = "2cdc8d50f426189eef89dac62fabfa0abb27d5cc008f25bf4156a0203325becc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1784,7 +1733,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1805,7 +1754,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1815,7 +1764,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1835,19 +1784,10 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", "unicode-xid", ] -[[package]] -name = "des" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e" -dependencies = [ - "cipher", -] - [[package]] name = "difflib" version = "0.4.0" @@ -1904,7 +1844,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.0", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -1915,7 +1855,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -1942,18 +1882,6 @@ dependencies = [ "dtoa", ] -[[package]] -name = "duct" -version = "0.13.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4ab5718d1224b63252cd0c6f74f6480f9ffeb117438a2e0f5cf6d9a4798929c" -dependencies = [ - "libc", - "once_cell", - "os_pipe", - "shared_child", -] - [[package]] name = "dunce" version = "1.0.5" @@ -2003,6 +1931,20 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f878075b9794c1e4ac788c95b728f26aa6366d32eeb10c7051389f898f7d067" +[[package]] +name = "embed-resource" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0963f530273dc3022ab2bdc3fcd6d488e850256f2284a82b7413cb9481ee85dd" +dependencies = [ + "cc", + "memchr", + "rustc_version", + "toml 0.8.23", + "vswhom", + "winreg 0.55.0", +] + [[package]] name = "encoding_rs" version = "0.8.35" @@ -2021,7 +1963,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -2034,6 +1976,12 @@ dependencies = [ "regex", ] +[[package]] +name = 
"env_home" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" + [[package]] name = "env_logger" version = "0.11.8" @@ -2064,9 +2012,9 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" dependencies = [ "libc", "windows-sys 0.59.0", @@ -2123,17 +2071,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "find-winsdk" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8cbf17b871570c1f8612b763bac3e86290602bcf5dc3c5ce657e0e1e9071d9e" -dependencies = [ - "serde", - "serde_derive", - "winreg 0.5.1", -] - [[package]] name = "fixedbitset" version = "0.5.7" @@ -2142,11 +2079,12 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "flate2" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", + "libz-rs-sys", "miniz_oxide", ] @@ -2158,9 +2096,9 @@ checksum = "8ce81f49ae8a0482e4c55ea62ebbd7e5a686af544c00b9d090bba3ff9be97b3d" [[package]] name = "fluent" -version = "0.16.1" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb74634707bebd0ce645a981148e8fb8c7bccd4c33c652aeffd28bf2f96d555a" +checksum = "8137a6d5a2c50d6b0ebfcb9aaa91a28154e0a70605f112d30cb0cd4a78670477" dependencies = [ "fluent-bundle", "unic-langid", @@ -2168,16 +2106,16 @@ dependencies = [ [[package]] name = "fluent-bundle" -version = "0.15.3" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe0a21ee80050c678013f82edf4b705fe2f26f1f9877593d13198612503f493" +checksum = "01203cb8918f5711e73891b347816d932046f95f54207710bda99beaeb423bf4" dependencies = [ "fluent-langneg", "fluent-syntax", "intl-memoizer", "intl_pluralrules", - "rustc-hash 1.1.0", - "self_cell 0.10.3", + "rustc-hash 2.1.1", + "self_cell", "smallvec", "unic-langid", ] @@ -2193,11 +2131,12 @@ dependencies = [ [[package]] name = "fluent-syntax" -version = "0.11.1" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a530c4694a6a8d528794ee9bbd8ba0122e779629ac908d15ad5a7ae7763a33d" +checksum = "54f0d287c53ffd184d04d8677f590f4ac5379785529e5e08b1c8083acdd5c198" dependencies = [ - "thiserror 1.0.69", + "memchr", + "thiserror 2.0.12", ] [[package]] @@ -2239,7 +2178,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -2263,26 +2202,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "forwarded-header-value" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835f84f38484cc86f110a805655697908257fb9a7af005234060891557198e9" -dependencies = [ - "nonempty", - "thiserror 1.0.69", -] - -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "fsevent-sys" version = "4.1.0" @@ -2294,16 +2213,16 @@ dependencies = [ [[package]] name = "fsrs" -version = "4.0.0" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2a3f0432b200326eed062fdcf5b1cef82ab9ba6635021a45f355ccca187ca3" +checksum = "c1f3a8c3df2c324ebab71461178fe8c1fe2d7373cf603f312b652befd026f06d" dependencies = [ "burn", "itertools 0.14.0", "log", "ndarray", - "ndarray-rand", "priority-queue", + "rand 0.9.1", "rayon", "serde", "snafu", @@ -2320,7 +2239,7 @@ dependencies = [ "camino", "clap", "fluent-syntax", - "itertools 0.13.0", + "itertools 0.14.0", "regex", "serde_json", "snafu", @@ -2406,7 +2325,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -2579,7 +2498,7 @@ dependencies = [ "num-traits", "once_cell", "paste", - "pulp 0.21.4", + "pulp 0.21.5", "raw-cpuid 11.5.0", "rayon", "seq-macro", @@ -2694,11 +2613,11 @@ dependencies = [ [[package]] name = "getopts" -version = "0.2.21" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +checksum = "cba6ae63eb948698e300f645f87c70f76630d505f23b8907cf1e193ee85048c1" dependencies = [ - "unicode-width 0.1.14", + "unicode-width 0.2.1", ] [[package]] @@ -2710,15 +2629,15 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "js-sys", @@ -2791,7 +2710,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbcd2dba93594b227a1f57ee09b8b9da8892c34d55aa332e034a228d0fe6a171" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "gpu-alloc-types", ] @@ -2801,7 +2720,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98ff03b468aa837d70984d55f5d3f846f6ec31fe34bbb97c4f85219caeee1ca4" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -2818,13 +2737,13 @@ dependencies = [ [[package]] name = "gpu-descriptor" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf29e94d6d243368b7a56caa16bc213e4f9f8ed38c4d9557069527b5d5281ca" +checksum = "b89c83349105e3732062a895becfc71a8f921bb71ecbbdd8ff99263e3b53a0ca" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "gpu-descriptor-types", - "hashbrown 0.15.2", + "hashbrown 0.15.4", ] [[package]] @@ -2833,7 +2752,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdf242682df893b86f33a73828fb09ca4b2d3bb6cc95249707fc684d27484b91" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -2857,9 +2776,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75249d144030531f8dee69fe9cea04d3edf809a017ae445e2abdff6629e86633" +checksum = 
"a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" dependencies = [ "atomic-waker", "bytes", @@ -2885,7 +2804,7 @@ dependencies = [ "crunchy", "num-traits", "rand 0.9.1", - "rand_distr 0.5.1", + "rand_distr", "serde", ] @@ -2927,9 +2846,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" dependencies = [ "allocator-api2", "equivalent", @@ -2939,11 +2858,11 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.4", ] [[package]] @@ -2963,11 +2882,11 @@ dependencies = [ [[package]] name = "headers" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9" +checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "bytes", "headers-core 0.3.0", "http 1.3.1", @@ -3002,15 +2921,9 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" -version = "0.3.9" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hermit-abi" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -3033,15 +2946,6 @@ dependencies = [ "digest", ] -[[package]] -name = "home" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" -dependencies = [ - "windows-sys 0.59.0", -] - [[package]] name = "html5ever" version = "0.26.0" @@ -3175,7 +3079,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.9", + "h2 0.4.10", "http 1.3.1", "http-body 1.0.1", "httparse", @@ -3189,35 +3093,20 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.2" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.32", - "rustls 0.21.12", - "tokio", - "tokio-rustls 0.24.1", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" -dependencies = [ - "futures-util", "http 1.3.1", "hyper 1.6.0", "hyper-util", - "rustls 0.23.26", + "rustls", "rustls-native-certs", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls", "tower-service", - "webpki-roots 0.26.8", + "webpki-roots", ] [[package]] @@ -3251,17 +3140,21 @@ 
dependencies = [ [[package]] name = "hyper-util" -version = "0.1.11" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" +checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http 1.3.1", "http-body 1.0.1", "hyper 1.6.0", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -3281,7 +3174,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.61.0", + "windows-core 0.61.2", ] [[package]] @@ -3295,21 +3188,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", - "yoke", + "potential_utf", + "yoke 0.8.0", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -3318,31 +3212,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -3350,67 +3224,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", - "yoke", + "yoke 0.8.0", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - [[package]] name = "id_tree" version = "1.8.0" @@ -3439,9 +3300,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -3470,7 +3331,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.15.4", ] [[package]] @@ -3491,7 +3352,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "inotify-sys", "libc", ] @@ -3505,21 +3366,11 @@ dependencies = [ "libc", ] -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "block-padding", - "generic-array", -] - [[package]] name = "intl-memoizer" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe22e020fce238ae18a6d5d8c502ee76a52a6e880d99477657e6acc30ec57bda" +checksum = "310da2e345f5eb861e7a07ee182262e94975051db9e4223e909ba90f392f163f" dependencies = [ "type-map", "unic-langid", @@ -3541,14 +3392,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] -name = "is-terminal" -version = "0.4.16" +name = "iri-string" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" dependencies = [ - "hermit-abi 0.5.0", - "libc", - "windows-sys 0.59.0", + "memchr", + "serde", ] [[package]] @@ -3592,9 +3442,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a064218214dc6a10fbae5ec5fa888d80c45d611aba169222fc272072bf7aef6" +checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" dependencies = [ "jiff-static", "log", @@ -3605,13 
+3455,13 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "199b7932d97e325aff3a7030e141eafe7f2c6268e1d1b24859b753a627f45254" +checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -3626,7 +3476,7 @@ version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.3", "libc", ] @@ -3669,9 +3519,9 @@ checksum = "e2db585e1d738fc771bf08a151420d3ed193d9d895a36df7f6f8a9456b911ddc" [[package]] name = "kqueue" -version = "1.0.8" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" dependencies = [ "kqueue-sys", "libc", @@ -3687,6 +3537,22 @@ dependencies = [ "libc", ] +[[package]] +name = "launcher" +version = "1.0.0" +dependencies = [ + "anki_io", + "anki_process", + "anyhow", + "camino", + "dirs 6.0.0", + "embed-resource", + "libc", + "libc-stdhandle", + "widestring", + "windows 0.61.3", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -3695,35 +3561,35 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.172" +version = "0.2.173" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" +checksum = "d8cfeafaffdbc32176b64fb251369d52ea9f0a8fbc6f8759edffef7b525d64bb" [[package]] -name = "libdbus-sys" -version = "0.2.5" +name = "libc-stdhandle" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06085512b750d640299b79be4bad3d2fa90a9c00b1fd9e1b46364f66f0485c72" +checksum = "6dac2473dc28934c5e0b82250dab231c9d3b94160d91fe9ff483323b05797551" dependencies = [ "cc", - "pkg-config", + "libc", ] [[package]] name = "libloading" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.53.2", ] [[package]] name = "libm" -version = "0.2.13" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9627da5196e5d8ed0b0495e61e518847578da83483c37288316d9b2e03a7f72" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libredox" @@ -3731,22 +3597,31 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "libc", "redox_syscall", ] [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "91632f3b4fb6bd1d72aa3d78f41ffecfcf2b1a6648d8c241dbe7dbfaf4875e15" dependencies = [ "cc", "pkg-config", "vcpkg", ] +[[package]] +name = "libz-rs-sys" +version = 
"0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "172a788537a2221661b480fee8dc5f96c580eb34fa88764d3205dc356c7e4221" +dependencies = [ + "zlib-rs", +] + [[package]] name = "linkcheck" version = "0.4.1" @@ -3777,11 +3652,11 @@ version = "0.0.0" dependencies = [ "anki", "futures", - "itertools 0.13.0", + "itertools 0.14.0", "linkcheck", "regex", - "reqwest 0.12.15", - "strum 0.26.3", + "reqwest 0.12.20", + "strum 0.27.1", "tokio", ] @@ -3794,12 +3669,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - [[package]] name = "linux-raw-sys" version = "0.9.4" @@ -3808,9 +3677,9 @@ checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "litrs" @@ -3820,9 +3689,9 @@ checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5" [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", @@ -3834,6 +3703,12 @@ version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lzma-sys" version = "0.1.20" @@ -3853,54 +3728,29 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "macerator" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f684f0f95ca0724667e4baf9bf60dc662cb2f6235fd9402d754f5512440efe0e" +checksum = "bce07f822458c4c303081d133a90610406162e7c8df17434956ac1892faf447b" dependencies = [ "bytemuck", + "cfg_aliases", "half", "macerator-macros", + "moddef", "num-traits", "paste", ] [[package]] name = "macerator-macros" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806a19478649154a009ef47e9361db11ed392a8f7978590eed4a590ceef01bdd" +checksum = "a2b955a106dca78c0577269d67a6d56114abb8644b810fc995a22348276bb9dd" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "makeapp" -version = "0.0.0" -dependencies = [ - "anyhow", - "apple-bundles", - "camino", - "clap", - "glob", - "plist", - "serde", - "serde_json", - "simple-file-manifest", - "walkdir", -] - -[[package]] -name = "makeexe" -version = "0.0.0" -dependencies = [ - "anyhow", - "camino", - "clap", - "tugger-windows-codesign", - "walkdir", + "syn 2.0.103", ] [[package]] @@ -3963,7 +3813,7 @@ checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -3983,15 +3833,15 @@ checksum = 
"2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matchit" -version = "0.7.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" [[package]] name = "matrixmultiply" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9380b911e3e96d10c1f415da0876389aaf1b56759054eeb0de7df940c456ba1a" +checksum = "a06de3016e9fae57a36fd14dba131fccf49f74b40b7fbdb472f96e361ec71a08" dependencies = [ "autocfg", "num_cpus", @@ -4008,9 +3858,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "mdbook" -version = "0.4.48" +version = "0.4.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6fbb4ac2d9fd7aa987c3510309ea3c80004a968d063c42f0d34fea070817c1" +checksum = "a87e65420ab45ca9c1b8cdf698f95b710cc826d373fa550f0f7fad82beac9328" dependencies = [ "ammonia", "anyhow", @@ -4027,7 +3877,6 @@ dependencies = [ "memchr", "notify", "notify-debouncer-mini", - "once_cell", "opener", "pathdiff", "pulldown-cmark 0.10.3", @@ -4038,7 +3887,7 @@ dependencies = [ "shlex", "tempfile", "tokio", - "toml", + "toml 0.5.11", "topological-sort", "walkdir", "warp", @@ -4046,9 +3895,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memmap2" @@ -4075,7 +3924,7 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f569fb946490b5743ad69813cb19629130ce9374034abe31614a36402d18f99e" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "block", "core-graphics-types", "foreign-types 0.5.0", @@ -4120,25 +3969,31 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", ] +[[package]] +name = "moddef" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e519fd9c6131c1c9a4a67f8bdc4f32eb4105b16c1468adea1b8e68c98c85ec4" + [[package]] name = "multer" version = "3.1.0" @@ -4158,9 +4013,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" [[package]] name = "naga" @@ -4170,11 +4025,11 @@ 
checksum = "2b977c445f26e49757f9aca3631c3b8b836942cb278d69a92e7b80d3b24da632" dependencies = [ "arrayvec", "bit-set", - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg_aliases", "codespan-reporting 0.12.0", "half", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "hexf-parse", "indexmap", "log", @@ -4220,17 +4075,6 @@ dependencies = [ "rayon", ] -[[package]] -name = "ndarray-rand" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f093b3db6fd194718dcdeea6bd8c829417deae904e3fcc7732dabcd4416d25d8" -dependencies = [ - "ndarray", - "rand 0.8.5", - "rand_distr 0.4.3", -] - [[package]] name = "ndk-sys" version = "0.5.0+25.2.9519653" @@ -4255,9 +4099,13 @@ dependencies = [ "camino", "dunce", "globset", - "itertools 0.13.0", + "itertools 0.14.0", "maplit", "num_cpus", + "regex", + "reqwest 0.12.20", + "serde_json", + "sha2", "walkdir", "which", ] @@ -4273,10 +4121,13 @@ dependencies = [ ] [[package]] -name = "nonempty" -version = "0.7.0" +name = "nom" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e591e719385e6ebaeb5ce5d3887f7d5676fceca6411d1925ccc95745f3d6f7" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] [[package]] name = "normpath" @@ -4293,7 +4144,7 @@ version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "filetime", "fsevent-sys", "inotify", @@ -4451,11 +4302,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", ] @@ -4477,7 +4328,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -4486,7 +4337,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c9bff0aa1d48904a1385ea2a8b97576fbdcbc9a3cfccd0d31fe978e1c4038c5" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "libloading", "nvml-wrapper-sys", "static_assertions", @@ -4527,6 +4378,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" + [[package]] name = "oorandom" version = "11.1.5" @@ -4535,23 +4392,22 @@ checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" [[package]] name = "opener" -version = "0.7.2" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0812e5e4df08da354c851a3376fead46db31c2214f849d3de356d774d057681" +checksum = "771b9704f8cd8b424ec747a320b30b47517a6966ba2c7da90047c16f4a962223" dependencies = [ "bstr", - "dbus", "normpath", "windows-sys 0.59.0", ] [[package]] name = "openssl" -version = "0.10.72" +version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +checksum = 
"8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg-if", "foreign-types 0.3.2", "libc", @@ -4568,7 +4424,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -4579,9 +4435,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.107" +version = "0.9.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" dependencies = [ "cc", "libc", @@ -4613,39 +4469,12 @@ dependencies = [ "num-traits", ] -[[package]] -name = "os_pipe" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "p12" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4873306de53fe82e7e484df31e1e947d61514b6ea2ed6cd7b45d63006fd9224" -dependencies = [ - "cbc", - "cipher", - "des", - "getrandom 0.2.16", - "hmac", - "lazy_static", - "rc2", - "sha1", - "yasna", -] - [[package]] name = "parking" version = "2.2.1" @@ -4654,9 +4483,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = [ "lock_api", "parking_lot_core", @@ -4664,9 +4493,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", @@ -4710,15 +4539,6 @@ dependencies = [ "sha2", ] -[[package]] -name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - [[package]] name = "percent-encoding" version = "2.3.1" @@ -4732,9 +4552,9 @@ source = "git+https://github.com/ankitects/rust-url.git?rev=bb930b8d089f4d30d7d1 [[package]] name = "pest" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" +checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" dependencies = [ "memchr", "thiserror 2.0.12", @@ -4743,9 +4563,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5" +checksum = 
"bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc" dependencies = [ "pest", "pest_generator", @@ -4753,24 +4573,23 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841" +checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "pest_meta" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0" +checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5" dependencies = [ - "once_cell", "pest", "sha2", ] @@ -4854,7 +4673,7 @@ dependencies = [ "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -4892,7 +4711,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -4913,19 +4732,6 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" -[[package]] -name = "plist" -version = "1.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac26e981c03a6e53e0aee43c113e3202f5581d5360dae7bd2c70e800dd0451d" -dependencies = [ - "base64 0.22.1", - "indexmap", - "quick-xml", - "serde", - "time", -] - [[package]] name = "plotters" version = "0.3.7" @@ -4956,9 +4762,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" dependencies = [ "serde", ] @@ -4972,6 +4778,15 @@ dependencies = [ "portable-atomic", ] +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -4984,7 +4799,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.8.25", + "zerocopy", ] [[package]] @@ -5001,12 +4816,12 @@ checksum = "e8cf8e6a8aa66ce33f63993ffc4ea4271eb5b0530a9002db8455ea6050c77bfa" [[package]] name = "prettyplease" -version = "0.2.32" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6" +checksum = "6837b9e10d61f45f987d50808f83d1ee3d206c66acf650c3e4ae2e1f6ddedf55" dependencies = [ "proc-macro2", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -5076,7 +4891,7 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.101", + "syn 2.0.103", "tempfile", ] @@ -5090,7 +4905,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -5127,25 +4942,26 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.6" +version = "0.10.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" dependencies = [ - "bitflags 2.9.0", - "getopts", + "bitflags 2.9.1", "memchr", + "pulldown-cmark-escape 0.10.1", "unicase", ] [[package]] name = "pulldown-cmark" -version = "0.10.3" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" +checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", + "getopts", "memchr", - "pulldown-cmark-escape", + "pulldown-cmark-escape 0.11.0", "unicase", ] @@ -5155,6 +4971,12 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + [[package]] name = "pulp" version = "0.18.22" @@ -5169,9 +4991,9 @@ dependencies = [ [[package]] name = "pulp" -version = "0.21.4" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fb7a99b37aaef4c7dd2fd15a819eb8010bfc7a2c2155230d51f497316cad6d" +checksum = "96b86df24f0a7ddd5e4b95c94fc9ed8a98f1ca94d3b01bdce2824097e7835907" dependencies = [ "bytemuck", "cfg-if", @@ -5183,11 +5005,10 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5203598f366b11a02b13aa20cab591229ff0a89fd121a308a5df751d5fc9219" +checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" dependencies = [ - "cfg-if", "indoc", "libc", "memoffset", @@ -5201,9 +5022,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99636d423fa2ca130fa5acde3059308006d46f98caac629418e53f7ebb1e9999" +checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" dependencies = [ "once_cell", "target-lexicon", @@ -5211,9 +5032,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78f9cf92ba9c409279bc3305b5409d90db2d2c22392d443a87df3a1adad59e33" +checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" dependencies = [ "libc", "pyo3-build-config", @@ -5221,43 +5042,34 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b999cb1a6ce21f9a6b147dcf1be9ffedf02e0043aec74dc390f3007047cecd9" +checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "pyo3-macros-backend" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "822ece1c7e1012745607d5cf0bcb2874769f0f7cb34c4cde03b9358eb9ef911a" +checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" dependencies = [ "heck", "proc-macro2", 
"pyo3-build-config", "quote", - "syn 2.0.101", -] - -[[package]] -name = "quick-xml" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" -dependencies = [ - "memchr", + "syn 2.0.103", ] [[package]] name = "quinn" -version = "0.11.7" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3bd15a6f2967aef83887dcb9fec0014580467e33720d073560cf015a5683012" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" dependencies = [ "bytes", "cfg_aliases", @@ -5265,7 +5077,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls 0.23.26", + "rustls", "socket2", "thiserror 2.0.12", "tokio", @@ -5275,16 +5087,17 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.11" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbafbbdbb0f638fe3f35f3c56739f77a8a1d070cb25603226c83339b391472b" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" dependencies = [ "bytes", - "getrandom 0.3.2", + "getrandom 0.3.3", + "lru-slab", "rand 0.9.1", - "ring 0.17.14", + "ring", "rustc-hash 2.1.1", - "rustls 0.23.26", + "rustls", "rustls-pki-types", "slab", "thiserror 2.0.12", @@ -5295,9 +5108,9 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.11" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "541d0f57c6ec747a90738a52741d3221f7960e8ac2f0ff4b1a63680e033b4ab5" +checksum = "ee4e529991f949c5e25755532370b8af5d114acae52326361d68d47af64aa842" dependencies = [ "cfg_aliases", "libc", @@ -5378,17 +5191,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.2", -] - -[[package]] -name = "rand_distr" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" -dependencies = [ - "num-traits", - "rand 0.8.5", + "getrandom 0.3.3", ] [[package]] @@ -5422,7 +5225,7 @@ version = "11.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -5457,27 +5260,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "rc2" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62c64daa8e9438b84aaae55010a93f396f8e60e3911590fcba770d04643fc1dd" -dependencies = [ - "cipher", -] - -[[package]] -name = "rcgen" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbe84efe2f38dea12e9bfc1f65377fdf03e53a18cb3b995faedf7934c7e785b" -dependencies = [ - "pem", - "ring 0.16.20", - "time", - "yasna", -] - [[package]] name = "reborrow" version = "0.5.5" @@ -5486,11 +5268,11 @@ checksum = "03251193000f4bd3b042892be858ee50e8b3719f2b08e5833ac4353724632430" [[package]] name = "redox_syscall" -version = "0.5.11" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" +checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] 
[[package]] @@ -5586,7 +5368,6 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", - "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -5596,7 +5377,6 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.12", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -5605,46 +5385,41 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.25.4", "winreg 0.50.0", ] [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813" dependencies = [ "base64 0.22.1", "bytes", + "futures-channel", "futures-core", "futures-util", "http 1.3.1", "http-body 1.0.1", "http-body-util", "hyper 1.6.0", - "hyper-rustls 0.27.5", + "hyper-rustls", "hyper-tls 0.6.0", "hyper-util", - "ipnet", "js-sys", "log", - "mime", "mime_guess", "native-tls", - "once_cell", "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.26", + "rustls", "rustls-native-certs", - "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", @@ -5652,33 +5427,17 @@ dependencies = [ "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.2", - "tokio-socks", + "tokio-rustls", "tokio-util", "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.26.8", - "windows-registry", -] - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", + "webpki-roots", ] [[package]] @@ -5691,7 +5450,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted 0.9.0", + "untrusted", "windows-sys 0.52.0", ] @@ -5751,7 +5510,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.101", + "syn 2.0.103", "unicode-ident", ] @@ -5766,24 +5525,24 @@ dependencies = [ "clap", "flate2", "junction", - "reqwest 0.12.15", + "reqwest 0.12.20", "sha2", "tar", "termcolor", "tokio", "which", "xz2", - "zip 0.6.6", + "zip 4.1.0", "zstd", ] [[package]] name = "rusqlite" -version = "0.30.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a78046161564f5e7cd9008aff3b2990b3850dc8e0349119b98e8f251e099f24d" +checksum = "3de23c3319433716cf134eed225fe9986bc24f63bed9be9f20c329029e672dc7" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -5793,9 +5552,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" [[package]] name = "rustc-hash" @@ -5820,52 +5579,27 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.44" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +checksum = 
"c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "errno", "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" -dependencies = [ - "bitflags 2.9.0", - "errno", - "libc", - "linux-raw-sys 0.9.4", + "linux-raw-sys", "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.21.12" +version = "0.23.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" -dependencies = [ - "log", - "ring 0.17.14", - "rustls-webpki 0.101.7", - "sct", -] - -[[package]] -name = "rustls" -version = "0.23.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" +checksum = "7160e3e10bf4535308537f3c4e1641468cd0e485175d6163087c0393c7d46643" dependencies = [ "once_cell", - "ring 0.17.14", + "ring", "rustls-pki-types", - "rustls-webpki 0.103.1", + "rustls-webpki", "subtle", "zeroize", ] @@ -5902,39 +5636,30 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" dependencies = [ "web-time", + "zeroize", ] [[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.103.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435" dependencies = [ - "ring 0.17.14", - "untrusted 0.9.0", -] - -[[package]] -name = "rustls-webpki" -version = "0.103.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" -dependencies = [ - "ring 0.17.14", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] name = "rustversion" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" [[package]] name = "ryu" @@ -6001,23 +5726,13 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring 0.17.14", - "untrusted 0.9.0", -] - [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -6030,8 +5745,8 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" 
dependencies = [ - "bitflags 2.9.0", - "core-foundation 0.10.0", + "bitflags 2.9.1", + "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", @@ -6047,15 +5762,6 @@ dependencies = [ "libc", ] -[[package]] -name = "self_cell" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14e4d63b804dc0c7ec4a1e52bcb63f02c7ac94476755aa579edac21e01f915d" -dependencies = [ - "self_cell 1.2.0", -] - [[package]] name = "self_cell" version = "1.2.0" @@ -6122,7 +5828,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6155,14 +5861,23 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", ] [[package]] name = "serde_tuple" -version = "0.5.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f025b91216f15a2a32aa39669329a475733590a015835d1783549a56d09427" +checksum = "f0f9b739e59a0e07b7a73bc11c3dcd6abf790d0f54042b67a422d4bd1f6cf6c0" dependencies = [ "serde", "serde_tuple_macros", @@ -6170,9 +5885,9 @@ dependencies = [ [[package]] name = "serde_tuple_macros" -version = "0.5.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4076151d1a2b688e25aaf236997933c66e18b870d0369f8b248b8ab2be630d7e" +checksum = "9e87546e85c5047d03b454d12ee25266fc269a461a4029956ca58d246b9aefae" dependencies = [ "proc-macro2", "quote", @@ -6204,9 +5919,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -6222,16 +5937,6 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "shared_child" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fa9338aed9a1df411814a5b2252f7cd206c55ae9bf2fa763f8de84603aa60c" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "shlex" version = "1.3.0" @@ -6248,10 +5953,10 @@ dependencies = [ ] [[package]] -name = "simple-file-manifest" -version = "0.11.0" +name = "simd-adler32" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd19be0257552dd56d1bb6946f89f193c6e5b9f13cc9327c4bc84a357507c74" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "siphasher" @@ -6267,12 +5972,9 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" [[package]] name = "slotmap" @@ -6285,9 +5987,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "snafu" @@ -6307,7 +6009,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6318,20 +6020,14 @@ checksum = "27207bb65232eda1f588cf46db2fee75c0808d557f6b3cf19a75f5d6d7c94df1" [[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -6358,7 +6054,7 @@ version = "0.3.0+sdk-1.3.268.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -6432,7 +6128,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6445,7 +6141,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6467,9 +6163,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8" dependencies = [ "proc-macro2", "quote", @@ -6493,13 +6189,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6508,7 +6204,7 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec7dddc5f0fee506baf8b9fdb989e242f17e4b11c61dfbb0635b705217199eea" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "byteorder", "enum-as-inner", "libc", @@ -6522,7 +6218,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01198a2debb237c62b6826ec7081082d951f46dbb64b0e8c7649a452230d1dfc" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "byteorder", "enum-as-inner", "libc", @@ -6541,7 +6237,7 @@ dependencies = [ "memchr", "ntapi", "rayon", - "windows 0.57.0", + "windows 0.56.0", ] [[package]] @@ -6574,7 +6270,7 @@ dependencies = [ "bytesize", "lazy_static", "libc", - "nom", + "nom 7.1.3", "time", "winapi", ] @@ -6598,14 +6294,14 @@ checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" [[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" 
dependencies = [ "fastrand", - "getrandom 0.3.2", + "getrandom 0.3.3", "once_cell", - "rustix 1.0.5", + "rustix", "windows-sys 0.59.0", ] @@ -6635,7 +6331,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" dependencies = [ - "rustix 1.0.5", + "rustix", "windows-sys 0.59.0", ] @@ -6676,7 +6372,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6687,7 +6383,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6701,12 +6397,11 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -6742,9 +6437,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -6777,9 +6472,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.44.2" +version = "1.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" +checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" dependencies = [ "backtrace", "bytes", @@ -6800,7 +6495,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -6813,35 +6508,13 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.12", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls 0.23.26", - "tokio", -] - -[[package]] -name = "tokio-socks" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f" -dependencies = [ - "either", - "futures-util", - "thiserror 1.0.69", + "rustls", "tokio", ] @@ -6880,22 +6553,46 @@ dependencies = [ ] [[package]] -name = "toml_datetime" -version = "0.6.9" +name = "toml" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] [[package]] name = "toml_edit" -version = "0.22.25" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10558ed0bd2a1562e630926a2d1f0b98c827da99fabd3fe20920a59642504485" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap", + "serde", + "serde_spanned", "toml_datetime", + "toml_write", "winnow", ] +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + [[package]] name = "topological-sort" version = "0.2.2" @@ -6920,16 +6617,18 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.5.2" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "bytes", + "futures-util", "http 1.3.1", "http-body 1.0.1", - "http-body-util", + "iri-string", "pin-project-lite", + "tower", "tower-layer", "tower-service", "tracing", @@ -6973,20 +6672,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "1b1ffbcf9c6f6b99d386e7444eb608ba646ae452a36b39737deb9663b610f662" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -7027,58 +6726,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" -[[package]] -name = "tugger-common" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90d950380afdb1a6bbe74f29485a04e821869dfad11f5929ff1c5b1dac09d02" -dependencies = [ - "anyhow", - "fs2", - "glob", - "hex", - "log", - "once_cell", - "reqwest 0.11.27", - "sha2", - "tempfile", - "url", - "zip 0.6.6", -] - -[[package]] -name = "tugger-windows" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9f181ac4fc7f8facfd418824d13045cd068ee73de44319a6116868c22789782" -dependencies = [ - "anyhow", - "duct", - "find-winsdk", - "glob", - "once_cell", - "semver", - "tugger-common", - "winapi", -] - -[[package]] -name = "tugger-windows-codesign" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3f09f8bdace495373cec3fc607bc39f00720a984ba82e310cc9382462fd364" -dependencies = [ - "anyhow", - "duct", - "log", - "p12", - "rcgen", - "time", - "tugger-common", - "tugger-windows", - "yasna", -] - [[package]] name = "tungstenite" version = "0.21.0" @@ -7100,11 +6747,11 @@ dependencies = [ [[package]] name = "type-map" -version = "0.5.0" +version = "0.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "deb68604048ff8fa93347f02441e4487594adc20bb8a084f9e564d2b827a0a9f" +checksum = "cb30dbbd9036155e74adad6812e9898d03ec374946234fbcebd5dfc7b9187b90" dependencies = [ - "rustc-hash 1.1.0", + "rustc-hash 2.1.1", ] [[package]] @@ -7137,7 +6784,7 @@ dependencies = [ "serde", "thiserror 1.0.69", "tracing", - "yoke", + "yoke 0.7.5", ] [[package]] @@ -7163,9 +6810,9 @@ checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" [[package]] name = "unic-langid" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23dd9d1e72a73b25e07123a80776aae3e7b0ec461ef94f9151eed6ec88005a44" +checksum = "a28ba52c9b05311f4f6e62d5d9d46f094bd6e84cb8df7b3ef952748d752a7d05" dependencies = [ "unic-langid-impl", "unic-langid-macros", @@ -7173,18 +6820,18 @@ dependencies = [ [[package]] name = "unic-langid-impl" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a5422c1f65949306c99240b81de9f3f15929f5a8bfe05bb44b034cc8bf593e5" +checksum = "dce1bf08044d4b7a94028c93786f8566047edc11110595914de93362559bc658" dependencies = [ "tinystr", ] [[package]] name = "unic-langid-macros" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0da1cd2c042d3c7569a1008806b02039e7a4a2bdf8f8e96bd3c792434a0e275e" +checksum = "d5957eb82e346d7add14182a3315a7e298f04e1ba4baac36f7f0dbfedba5fc25" dependencies = [ "proc-macro-hack", "tinystr", @@ -7194,13 +6841,13 @@ dependencies = [ [[package]] name = "unic-langid-macros-impl" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b" +checksum = "a1249a628de3ad34b821ecb1001355bca3940bcb2f88558f1a8bd82e977f75b5" dependencies = [ "proc-macro-hack", "quote", - "syn 2.0.101", + "syn 2.0.103", "unic-langid-impl", ] @@ -7263,9 +6910,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] name = "unicode-xid" @@ -7279,12 +6926,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -7315,12 +6956,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -7335,9 +6970,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" [[package]] name = "valuable" @@ -7353,7 +6988,7 @@ checksum = "41b6d82be61465f97d42bd1d15bf20f3b0a3a0905018f38f9d6f6962055b0b5c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7368,6 +7003,26 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vswhom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" +dependencies = [ + "libc", + "vswhom-sys", +] + +[[package]] +name = "vswhom-sys" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb067e4cbd1ff067d1df46c9194b5de0e98efd2810bbc95c5d5e5f25a3231150" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -7417,9 +7072,9 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -7436,7 +7091,7 @@ version = "0.12.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1fbb4ef9bbca0c1170e0b00dd28abc9e3b68669821600cad1caaed606583c6d" dependencies = [ - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] @@ -7461,7 +7116,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", "wasm-bindgen-shared", ] @@ -7496,7 +7151,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -7545,9 +7200,9 @@ dependencies = [ [[package]] name = "web_atoms" -version = "0.1.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "954c5a41f2bcb7314344079d0891505458cc2f4b422bdea1d5bfbe6d1a04903b" +checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414" dependencies = [ "phf 0.11.3", "phf_codegen 0.11.3", @@ -7557,30 +7212,24 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" - -[[package]] -name = "webpki-roots" -version = "0.26.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" +checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb" dependencies = [ "rustls-pki-types", ] [[package]] name = "wgpu" -version = "25.0.0" +version = "25.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6049eb2014a0e0d8689f9b787605dd71d5bbfdc74095ead499f3cff705c229" +checksum = "ec8fb398f119472be4d80bc3647339f56eb63b2a331f6a3d16e25d8144197dd9" dependencies = [ "arrayvec", - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg_aliases", "document-features", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "js-sys", 
"log", "naga", @@ -7600,17 +7249,17 @@ dependencies = [ [[package]] name = "wgpu-core" -version = "25.0.1" +version = "25.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19813e647da7aa3cdaa84f5846e2c64114970ea7c86b1e6aae8be08091f4bdc" +checksum = "f7b882196f8368511d613c6aeec80655160db6646aebddf8328879a88d54e500" dependencies = [ "arrayvec", "bit-set", "bit-vec", - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg_aliases", "document-features", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "indexmap", "log", "naga", @@ -7658,15 +7307,15 @@ dependencies = [ [[package]] name = "wgpu-hal" -version = "25.0.1" +version = "25.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb7c4a1dc42ff14c23c9b11ebf1ee85cde661a9b1cf0392f79c1faca5bc559fb" +checksum = "f968767fe4d3d33747bbd1473ccd55bf0f6451f55d733b5597e67b5deab4ad17" dependencies = [ "android_system_properties", "arrayvec", "ash", "bit-set", - "bitflags 2.9.0", + "bitflags 2.9.1", "block", "bytemuck", "cfg-if", @@ -7677,7 +7326,7 @@ dependencies = [ "gpu-alloc", "gpu-allocator", "gpu-descriptor", - "hashbrown 0.15.2", + "hashbrown 0.15.4", "js-sys", "khronos-egl", "libc", @@ -7709,7 +7358,7 @@ version = "25.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2aa49460c2a8ee8edba3fca54325540d904dd85b2e086ada762767e17d06e8bc" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "bytemuck", "js-sys", "log", @@ -7719,17 +7368,21 @@ dependencies = [ [[package]] name = "which" -version = "5.0.0" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14" +checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d" dependencies = [ - "either", - "home", - "once_cell", - "rustix 0.38.44", - "windows-sys 0.48.0", + "env_home", + "rustix", + "winsafe", ] +[[package]] +name = "widestring" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d" + [[package]] name = "winapi" version = "0.3.9" @@ -7771,16 +7424,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" -dependencies = [ - "windows-core 0.57.0", - "windows-targets 0.52.6", -] - [[package]] name = "windows" version = "0.58.0" @@ -7791,6 +7434,28 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + [[package]] name = "windows-core" version = "0.56.0" @@ -7803,18 +7468,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-core" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" -dependencies = 
[ - "windows-implement 0.57.0", - "windows-interface 0.57.0", - "windows-result 0.1.2", - "windows-targets 0.52.6", -] - [[package]] name = "windows-core" version = "0.58.0" @@ -7830,15 +7483,26 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.61.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement 0.60.0", "windows-interface 0.59.1", "windows-link", - "windows-result 0.3.2", - "windows-strings 0.4.0", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link", + "windows-threading", ] [[package]] @@ -7849,18 +7513,7 @@ checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "windows-implement" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7871,7 +7524,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7882,7 +7535,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7893,18 +7546,7 @@ checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "windows-interface" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7915,7 +7557,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -7926,24 +7568,23 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] -name = "windows-registry" -version = "0.4.0" +name = "windows-numerics" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ - "windows-result 0.3.2", - "windows-strings 0.3.1", - "windows-targets 0.53.0", + "windows-core 0.61.2", + "windows-link", ] [[package]] @@ -7966,9 +7607,9 @@ dependencies = [ [[package]] name = "windows-result" -version = 
"0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] @@ -7985,18 +7626,9 @@ dependencies = [ [[package]] name = "windows-strings" -version = "0.3.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ "windows-link", ] @@ -8028,6 +7660,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -8061,9 +7702,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.0" +version = "0.53.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" dependencies = [ "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", @@ -8075,6 +7716,15 @@ dependencies = [ "windows_x86_64_msvc 0.53.0", ] +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -8215,23 +7865,13 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.7" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27a759395c1195c4cc5cda607ef6f8f6498f64e78f7900f5de0a127a424704a" -dependencies = [ - "serde", - "winapi", -] - [[package]] name = "winreg" version = "0.50.0" @@ -8242,6 +7882,22 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "winreg" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb5a765337c50e9ec252c2069be9bf91c7df47afb103b642ba3a53bf8101be97" +dependencies = [ + "cfg-if", + "windows-sys 0.59.0", +] + +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + [[package]] name = "wiremock" version = "0.6.3" @@ -8272,7 +7928,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - 
"bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -8284,20 +7940,14 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "xattr" @@ -8306,7 +7956,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" dependencies = [ "libc", - "rustix 1.0.5", + "rustix", ] [[package]] @@ -8335,15 +7985,6 @@ dependencies = [ "lzma-sys", ] -[[package]] -name = "yasna" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" -dependencies = [ - "time", -] - [[package]] name = "yoke" version = "0.7.5" @@ -8352,7 +7993,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", - "yoke-derive", + "yoke-derive 0.7.5", + "zerofrom", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive 0.8.0", "zerofrom", ] @@ -8364,17 +8017,20 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", "synstructure", ] [[package]] -name = "zerocopy" -version = "0.7.35" +name = "yoke-derive" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ - "zerocopy-derive 0.7.35", + "proc-macro2", + "quote", + "syn 2.0.103", + "synstructure", ] [[package]] @@ -8383,18 +8039,7 @@ version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" dependencies = [ - "zerocopy-derive 0.8.25", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "zerocopy-derive", ] [[package]] @@ -8405,7 +8050,7 @@ checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", ] [[package]] @@ -8425,7 +8070,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.103", "synstructure", ] @@ -8436,38 +8081,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] -name = "zerovec" -version = "0.10.4" +name = "zerotrie" +version = "0.2.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" dependencies = [ - "yoke", + "displaydoc", + "yoke 0.8.0", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +dependencies = [ + "yoke 0.8.0", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "byteorder", - "crc32fast", - "crossbeam-utils", - "flate2", - "time", + "syn 2.0.103", ] [[package]] @@ -8485,6 +8128,39 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "zip" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7dcdb4229c0e79c2531a24de7726a0e980417a74fb4d030a35f535665439a0" +dependencies = [ + "arbitrary", + "crc32fast", + "flate2", + "indexmap", + "memchr", + "time", + "zopfli", +] + +[[package]] +name = "zlib-rs" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626bd9fa9734751fc50d6060752170984d7053f5a39061f524cda68023d4db8a" + +[[package]] +name = "zopfli" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" +dependencies = [ + "bumpalo", + "crc32fast", + "log", + "simd-adler32", +] + [[package]] name = "zstd" version = "0.13.3" diff --git a/Cargo.toml b/Cargo.toml index db0b9f79f..db5753893 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,8 +12,7 @@ members = [ "build/runner", "ftl", "pylib/rsbridge", - "qt/bundle/mac", - "qt/bundle/win", + "qt/launcher", "rslib", "rslib/i18n", "rslib/io", @@ -23,7 +22,6 @@ members = [ "rslib/sync", "tools/minilints", ] -exclude = ["qt/bundle"] resolver = "2" [workspace.dependencies.percent-encoding-iri] @@ -35,7 +33,7 @@ git = "https://github.com/ankitects/linkcheck.git" rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca" [workspace.dependencies.fsrs] -version = "4.0.0" +version = "4.1.1" # git = "https://github.com/open-spaced-repetition/fsrs-rs.git" # rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f" # path = "../open-spaced-repetition/fsrs-rs" @@ -54,99 +52,100 @@ ninja_gen = { "path" = "build/ninja_gen" } unicase = "=2.6.0" # any changes could invalidate sqlite indexes # normal -ammonia = "4.0.0" -anyhow = "1.0.90" -apple-bundles = "0.17.0" -async-compression = { version = "0.4.17", features = ["zstd", "tokio"] } +ammonia = "4.1.0" +anyhow = "1.0.98" +async-compression = { version = "0.4.24", features = ["zstd", "tokio"] } async-stream = "0.3.6" -async-trait = "0.1.83" -axum = { version = "0.7", features = ["multipart", "macros"] } -axum-client-ip = "0.6" -axum-extra = { version = "0.9.4", features = ["typed-header"] } -blake3 = "1.5.4" -bytes = "1.7.2" -camino = "1.1.9" -chrono = { version = "0.4.38", 
default-features = false, features = ["std", "clock"] } -clap = { version = "4.5.20", features = ["derive"] } -coarsetime = "0.1.34" -convert_case = "0.6.0" -criterion = { version = "0.5.1" } -csv = "1.3.0" -data-encoding = "2.6.0" +async-trait = "0.1.88" +axum = { version = "0.8.4", features = ["multipart", "macros"] } +axum-client-ip = "1.1.3" +axum-extra = { version = "0.10.1", features = ["typed-header"] } +bitflags = "2.9.1" +blake3 = "1.8.2" +bytes = "1.10.1" +camino = "1.1.10" +chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] } +clap = { version = "4.5.40", features = ["derive"] } +coarsetime = "0.1.36" +convert_case = "0.8.0" +criterion = { version = "0.6.0" } +csv = "1.3.1" +data-encoding = "2.9.0" difflib = "0.4.0" -dirs = "5.0.1" +dirs = "6.0.0" dunce = "1.0.5" +embed-resource = "3.0.4" envy = "0.4.2" -flate2 = "1.0.34" -fluent = "0.16.1" -fluent-bundle = "0.15.3" -fluent-syntax = "0.11.1" +flate2 = "1.1.2" +fluent = "0.17.0" +fluent-bundle = "0.16.0" +fluent-syntax = "0.12.0" fnv = "1.0.7" futures = "0.3.31" -glob = "0.3.1" -globset = "0.4.15" +globset = "0.4.16" hex = "0.4.3" htmlescape = "0.3.1" hyper = "1" id_tree = "1.8.0" inflections = "1.1.1" -intl-memoizer = "0.5.2" -itertools = "0.13.0" +intl-memoizer = "0.5.3" +itertools = "0.14.0" junction = "1.2.0" -lazy_static = "1.5.0" +libc = "0.2" +libc-stdhandle = "0.1" maplit = "1.0.2" -nom = "7.1.3" +nom = "8.0.0" num-format = "0.4.4" -num_cpus = "1.16.0" +num_cpus = "1.17.0" num_enum = "0.7.3" -once_cell = "1.20.2" +once_cell = "1.21.3" pbkdf2 = { version = "0.12", features = ["simple"] } -phf = { version = "0.11.2", features = ["macros"] } -pin-project = "1.1.6" -plist = "1.7.0" -prettyplease = "0.2.24" +phf = { version = "0.11.3", features = ["macros"] } +pin-project = "1.1.10" +prettyplease = "0.2.34" prost = "0.13" prost-build = "0.13" -prost-reflect = "0.14" +prost-reflect = "0.14.7" prost-types = "0.13" -pulldown-cmark = "0.9.6" -pyo3 = { version = "0.24", features = ["extension-module", "abi3", "abi3-py39"] } -rand = "0.8.5" -regex = "1.11.0" -reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] } -rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] } +pulldown-cmark = "0.13.0" +pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] } +rand = "0.9.1" +regex = "1.11.1" +reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] } +rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] } rustls-pemfile = "2.2.0" scopeguard = "1.2.0" -serde = { version = "1.0.210", features = ["derive"] } -serde-aux = "4.5.0" -serde_json = "1.0.132" -serde_repr = "0.1.19" -serde_tuple = "0.5.0" +serde = { version = "1.0.219", features = ["derive"] } +serde-aux = "4.7.0" +serde_json = "1.0.140" +serde_repr = "0.1.20" +serde_tuple = "1.1.0" sha1 = "0.10.6" -sha2 = { version = "0.10.8" } -simple-file-manifest = "0.11.0" +sha2 = { version = "0.10.9" } snafu = { version = "0.8.6", features = ["rust_1_61"] } -strum = { version = "0.26.3", features = ["derive"] } -syn = { version = "2.0.82", features = ["parsing", "printing"] } -tar = "0.4.42" -tempfile = "3.13.0" +strum = { version = "0.27.1", features = ["derive"] } +syn = { version = "2.0.103", features = ["parsing", "printing"] } +tar = "0.4.44" +tempfile = "3.20.0" termcolor = "1.4.1" -tokio = { version = "1.40", features = ["fs", 
"rt-multi-thread", "macros", "signal"] } -tokio-util = { version = "0.7.12", features = ["io"] } -tower-http = { version = "0.5", features = ["trace"] } -tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] } +tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] } +tokio-util = { version = "0.7.15", features = ["io"] } +tower-http = { version = "0.6.6", features = ["trace"] } +tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] } tracing-appender = "0.2.3" -tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] } -tugger-windows-codesign = "0.10.0" -unic-langid = { version = "0.9.5", features = ["macros"] } +tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] } +unic-langid = { version = "0.9.6", features = ["macros"] } unic-ucd-category = "0.9.0" unicode-normalization = "0.1.24" walkdir = "2.5.0" -which = "5.0.0" -wiremock = "0.6.2" +which = "8.0.0" +widestring = "1.1.0" +winapi = { version = "0.3", features = ["wincon", "winreg"] } +windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] } +wiremock = "0.6.3" xz2 = "0.1.7" -zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] } -zstd = { version = "0.13.2", features = ["zstdmt"] } +zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] } +zstd = { version = "0.13.3", features = ["zstdmt"] } # Apply mild optimizations to our dependencies in dev mode, which among other things # improves sha2 performance by about 21x. Opt 1 chosen due to diff --git a/LICENSE b/LICENSE index 033dc2a0a..456a7cfd6 100644 --- a/LICENSE +++ b/LICENSE @@ -6,8 +6,6 @@ The following included source code items use a license other than AGPL3: In the pylib folder: - * The SuperMemo importer: GPL3 and 0BSD. - * The Pauker importer: BSD-3. * statsbg.py: CC BY 4.0. In the qt folder: diff --git a/README.md b/README.md index 3bdcc2db3..04d5603a7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Anki +# Anki® [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci) diff --git a/build/configure/src/aqt.rs b/build/configure/src/aqt.rs index 5b2b8ec49..83be77e91 100644 --- a/build/configure/src/aqt.rs +++ b/build/configure/src/aqt.rs @@ -27,7 +27,6 @@ pub fn build_and_check_aqt(build: &mut Build) -> Result<()> { build_forms(build)?; build_generated_sources(build)?; build_data_folder(build)?; - build_macos_helper(build)?; build_wheel(build)?; check_python(build)?; Ok(()) @@ -39,7 +38,6 @@ fn build_forms(build: &mut Build) -> Result<()> { let mut py_files = vec![]; for path in ui_files.resolve() { let outpath = outdir.join(path.file_name().unwrap()).into_string(); - py_files.push(outpath.replace(".ui", "_qt5.py")); py_files.push(outpath.replace(".ui", "_qt6.py")); } build.add_action( @@ -337,47 +335,25 @@ impl BuildAction for BuildThemedIcon<'_> { } } -fn build_macos_helper(build: &mut Build) -> Result<()> { - if cfg!(target_os = "macos") { - build.add_action( - "qt:aqt:data:lib:libankihelper", - RunCommand { - command: ":pyenv:bin", - args: "$script $out $in", - inputs: hashmap! 
{ - "script" => inputs!["qt/mac/helper_build.py"], - "in" => inputs![glob!["qt/mac/*.swift"]], - "" => inputs!["out/env"], - }, - outputs: hashmap! { - "out" => vec!["qt/_aqt/data/lib/libankihelper.dylib"], - }, - }, - )?; - } - Ok(()) -} - fn build_wheel(build: &mut Build) -> Result<()> { build.add_action( "wheels:aqt", BuildWheel { name: "aqt", version: anki_version(), - src_folder: "qt/aqt", - gen_folder: "$builddir/qt/_aqt", platform: None, - deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "python/requirements.aqt.in"], + deps: inputs![ + ":qt:aqt", + glob!("qt/aqt/**"), + "qt/pyproject.toml", + "qt/hatch_build.py" + ], }, ) } fn check_python(build: &mut Build) -> Result<()> { - python_format( - build, - "qt", - inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")], - )?; + python_format(build, "qt", inputs![glob!("qt/**/*.py")])?; build.add_action( "check:pytest:aqt", diff --git a/build/configure/src/bundle.rs b/build/configure/src/bundle.rs deleted file mode 100644 index 50fe7414c..000000000 --- a/build/configure/src/bundle.rs +++ /dev/null @@ -1,442 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; - -use anyhow::Result; -use ninja_gen::action::BuildAction; -use ninja_gen::archives::download_and_extract; -use ninja_gen::archives::empty_manifest; -use ninja_gen::archives::with_exe; -use ninja_gen::archives::OnlineArchive; -use ninja_gen::archives::Platform; -use ninja_gen::build::BuildProfile; -use ninja_gen::cargo::CargoBuild; -use ninja_gen::cargo::RustOutput; -use ninja_gen::git::SyncSubmodule; -use ninja_gen::glob; -use ninja_gen::input::BuildInput; -use ninja_gen::inputs; -use ninja_gen::python::PythonEnvironment; -use ninja_gen::Build; -use ninja_gen::Utf8Path; - -use crate::anki_version; -use crate::platform::overriden_python_target_platform; -use crate::platform::overriden_rust_target_triple; - -#[derive(Debug, PartialEq, Eq)] -enum DistKind { - Standard, -} - -impl DistKind { - fn folder_name(&self) -> &'static str { - match self { - DistKind::Standard => "std", - } - } - - fn name(&self) -> &'static str { - match self { - DistKind::Standard => "standard", - } - } -} - -pub fn build_bundle(build: &mut Build) -> Result<()> { - // install into venv - setup_primary_venv(build)?; - install_anki_wheels(build)?; - - // bundle venv into output binary + extra_files - build_pyoxidizer(build)?; - build_artifacts(build)?; - build_binary(build)?; - - // package up outputs with Qt/other deps - download_dist_folder_deps(build)?; - build_dist_folder(build, DistKind::Standard)?; - - build_packages(build)?; - - Ok(()) -} - -fn targetting_macos_arm() -> bool { - cfg!(all(target_os = "macos", target_arch = "aarch64")) - && overriden_python_target_platform().is_none() -} - -const WIN_AUDIO: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz", - sha256: "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f", -}; - -const MAC_ARM_AUDIO: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-arm64.tar.gz", - sha256: "f6c4af9be59ae1c82a16f5c6307f13cbf31b49ad7b69ce1cb6e0e7b403cfdb8f", -}; - -const MAC_AMD_AUDIO: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-amd64.tar.gz", - sha256: 
"ecbb3c878805cdd58b1a0b8e3fd8c753b8ce3ad36c8b5904a79111f9db29ff42", -}; - -const MAC_ARM_QT6: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-arm64.tar.zst", - sha256: "9b2ade4ae9b80506689062845e83e8c60f7fa9843545bf7bb2d11d3e2f105878", -}; - -const MAC_AMD_QT6: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-amd64.tar.zst", - sha256: "dbd0871e4da22820d1fa9ab29220d631467d1178038dcab4b15169ad7f499b1b", -}; - -const LINUX_QT_PLUGINS: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-02/qt-plugins-linux-amd64.tar.gz", - sha256: "66bb568aca7242bc55ad419bf5c96755ca15d2a743e1c3a09cba8b83230b138b", -}; - -const NSIS_PLUGINS: OnlineArchive = OnlineArchive { - url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst", - sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8", -}; - -fn download_dist_folder_deps(build: &mut Build) -> Result<()> { - let mut bundle_deps = vec![":wheels"]; - if cfg!(windows) { - download_and_extract(build, "win_amd64_audio", WIN_AUDIO, empty_manifest())?; - download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?; - bundle_deps.extend([":extract:win_amd64_audio", ":extract:nsis_plugins"]); - } else if cfg!(target_os = "macos") { - if targetting_macos_arm() { - download_and_extract(build, "mac_arm_audio", MAC_ARM_AUDIO, empty_manifest())?; - download_and_extract(build, "mac_arm_qt6", MAC_ARM_QT6, empty_manifest())?; - bundle_deps.extend([":extract:mac_arm_audio", ":extract:mac_arm_qt6"]); - } else { - download_and_extract(build, "mac_amd_audio", MAC_AMD_AUDIO, empty_manifest())?; - download_and_extract(build, "mac_amd_qt6", MAC_AMD_QT6, empty_manifest())?; - bundle_deps.extend([":extract:mac_amd_audio", ":extract:mac_amd_qt6"]); - } - } else { - download_and_extract( - build, - "linux_qt_plugins", - LINUX_QT_PLUGINS, - empty_manifest(), - )?; - bundle_deps.extend([":extract:linux_qt_plugins"]); - } - build.add_dependency( - "bundle:deps", - inputs![bundle_deps - .iter() - .map(ToString::to_string) - .collect::>()], - ); - Ok(()) -} - -struct Venv { - label: &'static str, - path_without_builddir: &'static str, -} - -impl Venv { - fn label_as_target(&self, suffix: &str) -> String { - format!(":{}{suffix}", self.label) - } -} - -const PRIMARY_VENV: Venv = Venv { - label: "bundle:pyenv", - path_without_builddir: "bundle/pyenv", -}; - -fn setup_primary_venv(build: &mut Build) -> Result<()> { - let mut qt6_reqs = inputs![ - "python/requirements.bundle.txt", - "python/requirements.qt6_6.txt", - ]; - if cfg!(windows) { - qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"]; - } - build.add_action( - PRIMARY_VENV.label, - PythonEnvironment { - folder: PRIMARY_VENV.path_without_builddir, - base_requirements_txt: "python/requirements.base.txt".into(), - requirements_txt: qt6_reqs, - extra_binary_exports: &[], - }, - )?; - Ok(()) -} - -struct InstallAnkiWheels { - venv: Venv, -} - -impl BuildAction for InstallAnkiWheels { - fn command(&self) -> &str { - "$pip install --force-reinstall --no-deps $in" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - build.add_inputs("pip", inputs![self.venv.label_as_target(":pip")]); - build.add_inputs("in", inputs![":wheels"]); - build.add_output_stamp("bundle/wheels.stamp"); - } -} - -fn 
install_anki_wheels(build: &mut Build) -> Result<()> { - build.add_action( - "bundle:add_wheels:qt6", - InstallAnkiWheels { venv: PRIMARY_VENV }, - )?; - Ok(()) -} - -fn build_pyoxidizer(build: &mut Build) -> Result<()> { - let offline_build = env::var("OFFLINE_BUILD").is_ok(); - - build.add_action( - "bundle:pyoxidizer:repo", - SyncSubmodule { - path: "qt/bundle/PyOxidizer", - offline_build, - }, - )?; - let target = - overriden_rust_target_triple().unwrap_or_else(|| Platform::current().as_rust_triple()); - let output_bin = format!("bundle/rust/{target}/release/pyoxidizer",); - build.add_action( - "bundle:pyoxidizer:bin", - CargoBuild { - inputs: inputs![ - ":bundle:pyoxidizer:repo", - "out/env", - glob!["qt/bundle/PyOxidizer/**"] - ], - // can't use ::Binary() here, as we're in a separate workspace - outputs: &[RustOutput::Data("bin", &with_exe(&output_bin))], - target: Some(target), - extra_args: &format!( - "--manifest-path={} --target-dir={} -p pyoxidizer", - "qt/bundle/PyOxidizer/Cargo.toml", "$builddir/bundle/rust" - ), - release_override: Some(BuildProfile::Release), - }, - )?; - Ok(()) -} - -struct BuildArtifacts {} - -impl BuildAction for BuildArtifacts { - fn command(&self) -> &str { - "$runner build-artifacts $bundle_root $pyoxidizer_bin" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - build.add_inputs("pyoxidizer_bin", inputs![":bundle:pyoxidizer:bin"]); - build.add_inputs("", inputs![PRIMARY_VENV.label_as_target("")]); - build.add_inputs("", inputs![":bundle:add_wheels:qt6", glob!["qt/bundle/**"]]); - build.add_variable("bundle_root", "$builddir/bundle"); - build.add_outputs_ext( - "pyo3_config", - vec!["bundle/artifacts/pyo3-build-config-file.txt"], - true, - ); - } - - fn check_output_timestamps(&self) -> bool { - true - } -} - -fn build_artifacts(build: &mut Build) -> Result<()> { - build.add_action("bundle:artifacts", BuildArtifacts {}) -} - -struct BuildBundle {} - -impl BuildAction for BuildBundle { - fn command(&self) -> &str { - "$runner build-bundle-binary" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - build.add_inputs("", inputs![":bundle:artifacts", glob!["qt/bundle/**"]]); - build.add_outputs( - "", - vec![RustOutput::Binary("anki").path( - Utf8Path::new("$builddir/bundle/rust"), - Some( - overriden_rust_target_triple() - .unwrap_or_else(|| Platform::current().as_rust_triple()), - ), - // our pyoxidizer bin uses lto on the release profile - BuildProfile::Release, - )], - ); - } -} - -fn build_binary(build: &mut Build) -> Result<()> { - build.add_action("bundle:binary", BuildBundle {}) -} - -struct BuildDistFolder { - kind: DistKind, - deps: BuildInput, -} - -impl BuildAction for BuildDistFolder { - fn command(&self) -> &str { - "$runner build-dist-folder $kind $out_folder " - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - build.add_inputs("", &self.deps); - build.add_variable("kind", self.kind.name()); - let folder = match self.kind { - DistKind::Standard => "bundle/std", - }; - build.add_outputs("out_folder", vec![folder]); - build.add_outputs("stamp", vec![format!("{folder}.stamp")]); - } - - fn check_output_timestamps(&self) -> bool { - true - } -} - -fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> { - let deps = inputs![":bundle:deps", ":bundle:binary", glob!["qt/bundle/**"]]; - let group = match kind { - DistKind::Standard => "bundle:folder:std", - }; - build.add_action(group, BuildDistFolder { kind, deps }) -} - -fn build_packages(build: 
&mut Build) -> Result<()> { - if cfg!(windows) { - build_windows_installers(build) - } else if cfg!(target_os = "macos") { - build_mac_app(build, DistKind::Standard)?; - build_dmgs(build) - } else { - build_tarball(build, DistKind::Standard) - } -} - -struct BuildTarball { - kind: DistKind, -} - -impl BuildAction for BuildTarball { - fn command(&self) -> &str { - "chmod -R a+r $folder && tar -I '$zstd' --transform $transform -cf $tarball -C $folder ." - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - let input_folder_name = self.kind.folder_name(); - let input_folder_target = format!(":bundle:folder:{input_folder_name}"); - let input_folder_path = format!("$builddir/bundle/{input_folder_name}"); - - let version = anki_version(); - let qt = match self.kind { - DistKind::Standard => "qt6", - }; - let output_folder_base = format!("anki-{version}-linux-{qt}"); - let output_tarball = format!("bundle/package/{output_folder_base}.tar.zst"); - - build.add_inputs("", inputs![input_folder_target]); - build.add_variable("zstd", "zstd -c --long -T0 -18"); - build.add_variable("transform", format!("s%^.%{output_folder_base}%S")); - build.add_variable("folder", input_folder_path); - build.add_outputs("tarball", vec![output_tarball]); - } -} - -fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> { - let name = kind.folder_name(); - build.add_action(format!("bundle:package:{name}"), BuildTarball { kind }) -} - -struct BuildWindowsInstallers {} - -impl BuildAction for BuildWindowsInstallers { - fn command(&self) -> &str { - "cargo run -p makeexe --target-dir=out/rust -- $version $src_root $bundle_root $out" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - let version = anki_version(); - let outputs = ["qt6"].iter().map(|qt| { - let output_base = format!("anki-{version}-windows-{qt}"); - format!("bundle/package/{output_base}.exe") - }); - - build.add_inputs("", inputs![":bundle:folder:std"]); - build.add_variable("version", &version); - build.add_variable("bundle_root", "$builddir/bundle"); - build.add_outputs("out", outputs); - } -} - -fn build_windows_installers(build: &mut Build) -> Result<()> { - build.add_action("bundle:package", BuildWindowsInstallers {}) -} - -struct BuildMacApp { - kind: DistKind, -} - -impl BuildAction for BuildMacApp { - fn command(&self) -> &str { - "cargo run -p makeapp --target-dir=out/rust -- build-app $version $kind $stamp" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - let folder_name = self.kind.folder_name(); - build.add_inputs("", inputs![format!(":bundle:folder:{folder_name}")]); - build.add_variable("version", anki_version()); - build.add_variable("kind", self.kind.name()); - build.add_outputs("stamp", vec![format!("bundle/app/{folder_name}.stamp")]); - } -} - -fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> { - build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind }) -} - -struct BuildDmgs {} - -impl BuildAction for BuildDmgs { - fn command(&self) -> &str { - "cargo run -p makeapp --target-dir=out/rust -- build-dmgs $dmgs" - } - - fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) { - let version = anki_version(); - let platform = if targetting_macos_arm() { - "apple" - } else { - "intel" - }; - let qt = &["qt6"][..]; - let dmgs = qt - .iter() - .map(|qt| format!("bundle/dmg/anki-{version}-mac-{platform}-{qt}.dmg")); - - build.add_inputs("", inputs![":bundle:app"]); - build.add_outputs("dmgs", dmgs); - } -} 
- -fn build_dmgs(build: &mut Build) -> Result<()> { - build.add_action("bundle:dmg", BuildDmgs {}) -} diff --git a/build/configure/src/launcher.rs b/build/configure/src/launcher.rs new file mode 100644 index 000000000..4a1927289 --- /dev/null +++ b/build/configure/src/launcher.rs @@ -0,0 +1,44 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use anyhow::Result; +use ninja_gen::archives::download_and_extract; +use ninja_gen::archives::empty_manifest; +use ninja_gen::archives::OnlineArchive; +use ninja_gen::command::RunCommand; +use ninja_gen::hashmap; +use ninja_gen::inputs; +use ninja_gen::Build; + +pub fn setup_uv_universal(build: &mut Build) -> Result<()> { + if !cfg!(target_arch = "aarch64") { + return Ok(()); + } + + build.add_action( + "launcher:uv_universal", + RunCommand { + command: "/usr/bin/lipo", + args: "-create -output $out $arm_bin $x86_bin", + inputs: hashmap! { + "arm_bin" => inputs![":extract:uv:bin"], + "x86_bin" => inputs![":extract:uv_mac_x86:bin"], + }, + outputs: hashmap! { + "out" => vec!["launcher/uv"], + }, + }, + ) +} + +pub fn build_launcher(build: &mut Build) -> Result<()> { + setup_uv_universal(build)?; + download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?; + + Ok(()) +} + +const NSIS_PLUGINS: OnlineArchive = OnlineArchive { + url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst", + sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8", +}; diff --git a/build/configure/src/main.rs b/build/configure/src/main.rs index f88a32155..afd1cfb4a 100644 --- a/build/configure/src/main.rs +++ b/build/configure/src/main.rs @@ -2,7 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html mod aqt; -mod bundle; +mod launcher; mod platform; mod pylib; mod python; @@ -13,13 +13,14 @@ use std::env; use anyhow::Result; use aqt::build_and_check_aqt; -use bundle::build_bundle; +use launcher::build_launcher; use ninja_gen::glob; use ninja_gen::inputs; use ninja_gen::protobuf::check_proto; use ninja_gen::protobuf::setup_protoc; -use ninja_gen::python::setup_python; +use ninja_gen::python::setup_uv; use ninja_gen::Build; +use platform::overriden_python_venv_platform; use pylib::build_pylib; use pylib::check_pylib; use python::check_python; @@ -47,7 +48,10 @@ fn main() -> Result<()> { check_proto(build, inputs![glob!["proto/**/*.proto"]])?; if env::var("OFFLINE_BUILD").is_err() { - setup_python(build)?; + setup_uv( + build, + overriden_python_venv_platform().unwrap_or(build.host_platform), + )?; } setup_venv(build)?; @@ -57,7 +61,7 @@ fn main() -> Result<()> { build_and_check_aqt(build)?; if env::var("OFFLINE_BUILD").is_err() { - build_bundle(build)?; + build_launcher(build)?; } setup_sphinx(build)?; diff --git a/build/configure/src/platform.rs b/build/configure/src/platform.rs index 4aec36e65..ce8a7a5ba 100644 --- a/build/configure/src/platform.rs +++ b/build/configure/src/platform.rs @@ -5,18 +5,30 @@ use std::env; use ninja_gen::archives::Platform; -/// Usually None to use the host architecture; can be overriden by setting -/// MAC_X86 to build for x86_64 on Apple Silicon +/// Please see [`overriden_python_target_platform()`] for details. 
pub fn overriden_rust_target_triple() -> Option<&'static str> {
-    overriden_python_target_platform().map(|p| p.as_rust_triple())
+    overriden_python_wheel_platform().map(|p| p.as_rust_triple())
 }
 
-/// Usually None to use the host architecture; can be overriden by setting
-/// MAC_X86 to build for x86_64 on Apple Silicon
-pub fn overriden_python_target_platform() -> Option<Platform> {
-    if env::var("MAC_X86").is_ok() {
-        Some(Platform::MacX64)
+/// Usually None to use the host architecture, except on Windows which
+/// always uses x86_64, since WebEngine is unavailable for ARM64.
+pub fn overriden_python_venv_platform() -> Option<Platform> {
+    if cfg!(target_os = "windows") {
+        Some(Platform::WindowsX64)
     } else {
         None
     }
 }
+
+/// Like [`overriden_python_venv_platform`], but:
+/// If MAC_X86 is set, an X86 wheel will be built on macOS ARM.
+/// If LIN_ARM64 is set, an ARM64 wheel will be built on Linux AMD64.
+pub fn overriden_python_wheel_platform() -> Option<Platform> {
+    if env::var("MAC_X86").is_ok() {
+        Some(Platform::MacX64)
+    } else if env::var("LIN_ARM64").is_ok() {
+        Some(Platform::LinuxArm)
+    } else {
+        overriden_python_venv_platform()
+    }
+}
diff --git a/build/configure/src/pylib.rs b/build/configure/src/pylib.rs
index 7d269cbd2..21820ae8b 100644
--- a/build/configure/src/pylib.rs
+++ b/build/configure/src/pylib.rs
@@ -14,7 +14,7 @@ use ninja_gen::python::PythonTest;
 use ninja_gen::Build;
 
 use crate::anki_version;
-use crate::platform::overriden_python_target_platform;
+use crate::platform::overriden_python_wheel_platform;
 use crate::python::BuildWheel;
 use crate::python::GenPythonProto;
 
@@ -50,7 +50,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
             output: &format!(
                 "pylib/anki/_rsbridge.{}",
                 match build.host_platform {
-                    Platform::WindowsX64 => "pyd",
+                    Platform::WindowsX64 | Platform::WindowsArm => "pyd",
                     _ => "so",
                 }
             ),
@@ -64,13 +64,12 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
         BuildWheel {
             name: "anki",
             version: anki_version(),
-            src_folder: "pylib/anki",
-            gen_folder: "$builddir/pylib/anki",
-            platform: overriden_python_target_platform().or(Some(build.host_platform)),
+            platform: overriden_python_wheel_platform().or(Some(build.host_platform)),
             deps: inputs![
                 ":pylib:anki",
                 glob!("pylib/anki/**"),
-                "python/requirements.anki.in",
+                "pylib/pyproject.toml",
+                "pylib/hatch_build.py"
             ],
         },
     )?;
diff --git a/build/configure/src/python.rs b/build/configure/src/python.rs
index 17dddde16..9d5e9057e 100644
--- a/build/configure/src/python.rs
+++ b/build/configure/src/python.rs
@@ -7,87 +7,69 @@ use anyhow::Result;
 use ninja_gen::action::BuildAction;
 use ninja_gen::archives::Platform;
 use ninja_gen::build::FilesHandle;
-use ninja_gen::command::RunCommand;
 use ninja_gen::copy::CopyFiles;
 use ninja_gen::glob;
-use ninja_gen::hashmap;
 use ninja_gen::input::BuildInput;
 use ninja_gen::inputs;
 use ninja_gen::python::python_format;
 use ninja_gen::python::PythonEnvironment;
-use ninja_gen::python::PythonLint;
 use ninja_gen::python::PythonTypecheck;
-use ninja_gen::rsync::RsyncFiles;
+use ninja_gen::python::RuffCheck;
 use ninja_gen::Build;
 
-// When updating Qt, make sure to update the .txt file in bundle.rs as well.
+/// Normalize version string by removing leading zeros from numeric parts
+/// while preserving pre-release markers (b1, rc2, a3, etc.)
+fn normalize_version(version: &str) -> String {
+    version
+        .split('.')
+        .map(|part| {
+            // Check if the part contains only digits
+            if part.chars().all(|c| c.is_ascii_digit()) {
+                // Numeric part: remove leading zeros
+                part.parse::<u32>().unwrap_or(0).to_string()
+            } else {
+                // Mixed part (contains both numbers and pre-release markers)
+                // Split on first non-digit character and normalize the numeric prefix
+                let chars = part.chars();
+                let mut numeric_prefix = String::new();
+                let mut rest = String::new();
+                let mut found_non_digit = false;
+
+                for ch in chars {
+                    if ch.is_ascii_digit() && !found_non_digit {
+                        numeric_prefix.push(ch);
+                    } else {
+                        found_non_digit = true;
+                        rest.push(ch);
+                    }
+                }
+
+                if numeric_prefix.is_empty() {
+                    part.to_string()
+                } else {
+                    let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
+                    format!("{normalized_prefix}{rest}")
+                }
+            }
+        })
+        .collect::<Vec<_>>()
+        .join(".")
+}
+
 pub fn setup_venv(build: &mut Build) -> Result<()> {
-    let platform_deps = if cfg!(windows) {
-        inputs![
-            "python/requirements.qt6_6.txt",
-            "python/requirements.win.txt",
-        ]
-    } else if cfg!(target_os = "macos") {
-        inputs!["python/requirements.qt6_6.txt",]
-    } else if std::env::var("PYTHONPATH").is_ok() {
-        // assume we have a system-provided Qt
-        inputs![]
-    } else if cfg!(target_arch = "aarch64") {
-        inputs!["python/requirements.qt6_8.txt"]
-    } else {
-        inputs!["python/requirements.qt6_6.txt"]
-    };
-    let requirements_txt = inputs!["python/requirements.dev.txt", platform_deps];
+    let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"];
     build.add_action(
         "pyenv",
         PythonEnvironment {
-            folder: "pyenv",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt,
-            extra_binary_exports: &[
-                "pip-compile",
-                "pip-sync",
-                "mypy",
-                "black", // Required for offline build
-                "isort",
-                "pylint",
-                "pytest",
-                "protoc-gen-mypy", // ditto
+            venv_folder: "pyenv",
+            deps: inputs![
+                "pyproject.toml",
+                "pylib/pyproject.toml",
+                "qt/pyproject.toml",
+                "uv.lock"
             ],
-        },
-    )?;
-
-    // optional venvs for testing other Qt versions
-    let mut venv_reqs = inputs!["python/requirements.bundle.txt"];
-    if cfg!(windows) {
-        venv_reqs = inputs![venv_reqs, "python/requirements.win.txt"];
-    }
-
-    build.add_action(
-        "pyenv-qt6.8",
-        PythonEnvironment {
-            folder: "pyenv-qt6.8",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt6_8.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.15",
-        PythonEnvironment {
-            folder: "pyenv-qt5.15",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt5_15.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.14",
-        PythonEnvironment {
-            folder: "pyenv-qt5.14",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![venv_reqs, "python/requirements.qt5_14.txt"],
-            extra_binary_exports: &[],
+            extra_args: "--all-packages --extra qt --extra audio",
+            extra_binary_exports,
         },
     )?;
 
@@ -133,45 +115,59 @@ impl BuildAction for GenPythonProto {
 pub struct BuildWheel {
     pub name: &'static str,
     pub version: String,
-    pub src_folder: &'static str,
-    pub gen_folder: &'static str,
     pub platform: Option<Platform>,
     pub deps: BuildInput,
 }
 
 impl BuildAction for BuildWheel {
     fn command(&self) -> &str {
-        "$pyenv_bin $script $src $gen $out"
+        "$uv build --wheel --out-dir=$out_dir --project=$project_dir"
     }
 
     fn 
files(&mut self, build: &mut impl FilesHandle) { - build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]); - build.add_inputs("script", inputs!["python/write_wheel.py"]); + build.add_inputs("uv", inputs![":uv_binary"]); build.add_inputs("", &self.deps); - build.add_variable("src", self.src_folder); - build.add_variable("gen", self.gen_folder); + // Set the project directory based on which package we're building + let project_dir = if self.name == "anki" { "pylib" } else { "qt" }; + build.add_variable("project_dir", project_dir); + + // Set environment variable for uv to use our pyenv + build.add_variable("pyenv_path", "$builddir/pyenv"); + build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path"); + + // Set output directory + build.add_variable("out_dir", "$builddir/wheels/"); + + // Calculate the wheel filename that uv will generate let tag = if let Some(platform) = self.platform { - let platform = match platform { - Platform::LinuxX64 => "manylinux_2_35_x86_64", - Platform::LinuxArm => "manylinux_2_35_aarch64", + let platform_tag = match platform { + Platform::LinuxX64 => "manylinux_2_36_x86_64", + Platform::LinuxArm => "manylinux_2_36_aarch64", Platform::MacX64 => "macosx_12_0_x86_64", Platform::MacArm => "macosx_12_0_arm64", Platform::WindowsX64 => "win_amd64", + Platform::WindowsArm => "win_arm64", }; - format!("cp39-abi3-{platform}") + format!("cp39-abi3-{platform_tag}") } else { "py3-none-any".into() }; + + // Set environment variable for hatch_build.py to use the correct platform tag + build.add_variable("wheel_tag", &tag); + build.add_env_var("ANKI_WHEEL_TAG", "$wheel_tag"); + let name = self.name; - let version = &self.version; - let wheel_path = format!("wheels/{name}-{version}-{tag}.whl"); + + let normalized_version = normalize_version(&self.version); + + let wheel_path = format!("wheels/{name}-{normalized_version}-{tag}.whl"); build.add_outputs("out", vec![wheel_path]); } } pub fn check_python(build: &mut Build) -> Result<()> { - python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?; python_format(build, "tools", inputs![glob!("tools/**/*.py")])?; build.add_action( @@ -183,7 +179,6 @@ pub fn check_python(build: &mut Build) -> Result<()> { "qt/tools", "out/pylib/anki", "out/qt/_aqt", - "ftl", "python", "tools", ], @@ -195,60 +190,26 @@ pub fn check_python(build: &mut Build) -> Result<()> { }, )?; - add_pylint(build)?; - - Ok(()) -} - -fn add_pylint(build: &mut Build) -> Result<()> { - // pylint does not support PEP420 implicit namespaces split across import paths, - // so we need to merge our pylib sources and generated files before invoking it, - // and add a top-level __init__.py + let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"]; + let ruff_deps = inputs![ + glob!["{pylib,ftl,qt,python,tools}/**/*.py"], + ":pylib:anki", + ":qt:aqt" + ]; build.add_action( - "check:pylint:copy_pylib", - RsyncFiles { - inputs: inputs![":pylib:anki"], - target_folder: "pylint/anki", - strip_prefix: "$builddir/pylib/anki", - // avoid copying our large rsbridge binary - extra_args: "--links", + "check:ruff", + RuffCheck { + folders: ruff_folders, + deps: ruff_deps.clone(), + check_only: true, }, )?; build.add_action( - "check:pylint:copy_pylib", - RsyncFiles { - inputs: inputs![glob!["pylib/anki/**"]], - target_folder: "pylint/anki", - strip_prefix: "pylib/anki", - extra_args: "", - }, - )?; - build.add_action( - "check:pylint:copy_pylib", - RunCommand { - command: ":pyenv:bin", - args: "$script $out", - inputs: hashmap! 
{ "script" => inputs!["python/mkempty.py"] }, - outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] }, - }, - )?; - build.add_action( - "check:pylint", - PythonLint { - folders: &[ - "$builddir/pylint/anki", - "qt/aqt", - "ftl", - "pylib/tools", - "tools", - "python", - ], - pylint_ini: inputs![".pylintrc"], - deps: inputs![ - ":check:pylint:copy_pylib", - ":qt:aqt", - glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py") - ], + "fix:ruff", + RuffCheck { + folders: ruff_folders, + deps: ruff_deps, + check_only: false, }, )?; @@ -262,8 +223,7 @@ struct Sphinx { impl BuildAction for Sphinx { fn command(&self) -> &str { if env::var("OFFLINE_BUILD").is_err() { - "$pip install sphinx sphinx_rtd_theme sphinx-autoapi \ - && $python python/sphinx/build.py" + "$uv sync --extra sphinx && $python python/sphinx/build.py" } else { "$python python/sphinx/build.py" } @@ -271,7 +231,10 @@ impl BuildAction for Sphinx { fn files(&mut self, build: &mut impl FilesHandle) { if env::var("OFFLINE_BUILD").is_err() { - build.add_inputs("pip", inputs![":pyenv:pip"]); + build.add_inputs("uv", inputs![":uv_binary"]); + // Set environment variable to use the existing pyenv + build.add_variable("pyenv_path", "$builddir/pyenv"); + build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path"); } build.add_inputs("python", inputs![":pyenv:bin"]); build.add_inputs("", &self.deps); @@ -294,8 +257,35 @@ pub(crate) fn setup_sphinx(build: &mut Build) -> Result<()> { build.add_action( "python:sphinx", Sphinx { - deps: inputs![":pylib", ":qt", ":python:sphinx:copy_conf"], + deps: inputs![ + ":pylib", + ":qt", + ":python:sphinx:copy_conf", + "pyproject.toml" + ], }, )?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_normalize_version_basic() { + assert_eq!(normalize_version("1.2.3"), "1.2.3"); + assert_eq!(normalize_version("01.02.03"), "1.2.3"); + assert_eq!(normalize_version("1.0.0"), "1.0.0"); + } + + #[test] + fn test_normalize_version_with_prerelease() { + assert_eq!(normalize_version("1.2.3b1"), "1.2.3b1"); + assert_eq!(normalize_version("01.02.03b1"), "1.2.3b1"); + assert_eq!(normalize_version("1.0.0rc2"), "1.0.0rc2"); + assert_eq!(normalize_version("2.1.0a3"), "2.1.0a3"); + assert_eq!(normalize_version("1.2.3beta1"), "1.2.3beta1"); + assert_eq!(normalize_version("1.2.3alpha1"), "1.2.3alpha1"); + } +} diff --git a/build/configure/src/rust.rs b/build/configure/src/rust.rs index f5da67086..758752fa6 100644 --- a/build/configure/src/rust.rs +++ b/build/configure/src/rust.rs @@ -154,7 +154,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> { "$builddir/buildhash", // building on Windows requires python3.lib if cfg!(windows) { - inputs![":extract:python"] + inputs![":pyenv:bin"] } else { inputs![] } @@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> { pub fn check_rust(build: &mut Build) -> Result<()> { let inputs = inputs![ - glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"), + glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"), "Cargo.lock", "Cargo.toml", "rust-toolchain.toml", @@ -247,7 +247,7 @@ pub fn check_minilints(build: &mut Build) -> Result<()> { let files = inputs![ glob![ "**/*.{py,rs,ts,svelte,mjs,md}", - "{node_modules,qt/bundle/PyOxidizer,ts/.svelte-kit}/**" + "{node_modules,ts/.svelte-kit}/**" ], "Cargo.lock" ]; diff --git a/build/ninja_gen/Cargo.toml b/build/ninja_gen/Cargo.toml index 7757116c6..5e5a4f736 100644 --- a/build/ninja_gen/Cargo.toml +++ b/build/ninja_gen/Cargo.toml @@ -16,5 +16,26 @@ 
globset.workspace = true itertools.workspace = true maplit.workspace = true num_cpus.workspace = true +regex.workspace = true +serde_json.workspace = true +sha2.workspace = true walkdir.workspace = true which.workspace = true + +[target.'cfg(windows)'.dependencies] +reqwest = { workspace = true, features = ["blocking", "json", "native-tls"] } + +[target.'cfg(not(windows))'.dependencies] +reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] } + +[[bin]] +name = "update_uv" +path = "src/bin/update_uv.rs" + +[[bin]] +name = "update_protoc" +path = "src/bin/update_protoc.rs" + +[[bin]] +name = "update_node" +path = "src/bin/update_node.rs" diff --git a/build/ninja_gen/src/archives.rs b/build/ninja_gen/src/archives.rs index 9dd784bdd..3f87d3ff5 100644 --- a/build/ninja_gen/src/archives.rs +++ b/build/ninja_gen/src/archives.rs @@ -26,22 +26,21 @@ pub enum Platform { MacX64, MacArm, WindowsX64, + WindowsArm, } impl Platform { pub fn current() -> Self { - if cfg!(windows) { - Self::WindowsX64 - } else { - let os = std::env::consts::OS; - let arch = std::env::consts::ARCH; - match (os, arch) { - ("linux", "x86_64") => Self::LinuxX64, - ("linux", "aarch64") => Self::LinuxArm, - ("macos", "x86_64") => Self::MacX64, - ("macos", "aarch64") => Self::MacArm, - _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"), - } + let os = std::env::consts::OS; + let arch = std::env::consts::ARCH; + match (os, arch) { + ("linux", "x86_64") => Self::LinuxX64, + ("linux", "aarch64") => Self::LinuxArm, + ("macos", "x86_64") => Self::MacX64, + ("macos", "aarch64") => Self::MacArm, + ("windows", "x86_64") => Self::WindowsX64, + ("windows", "aarch64") => Self::WindowsArm, + _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"), } } @@ -62,6 +61,7 @@ impl Platform { Platform::MacX64 => "x86_64-apple-darwin", Platform::MacArm => "aarch64-apple-darwin", Platform::WindowsX64 => "x86_64-pc-windows-msvc", + Platform::WindowsArm => "aarch64-pc-windows-msvc", } } } diff --git a/build/ninja_gen/src/bin/update_node.rs b/build/ninja_gen/src/bin/update_node.rs new file mode 100644 index 000000000..32dbf6d4a --- /dev/null +++ b/build/ninja_gen/src/bin/update_node.rs @@ -0,0 +1,268 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::error::Error; +use std::fs; +use std::path::Path; + +use regex::Regex; +use reqwest::blocking::Client; +use serde_json::Value; + +#[derive(Debug)] +struct NodeRelease { + version: String, + files: Vec, +} + +#[derive(Debug)] +struct NodeFile { + filename: String, + url: String, +} + +fn main() -> Result<(), Box> { + let release_info = fetch_node_release_info()?; + let new_text = generate_node_archive_function(&release_info)?; + update_node_text(&new_text)?; + println!("Node.js archive function updated successfully!"); + Ok(()) +} + +fn fetch_node_release_info() -> Result> { + let client = Client::new(); + + // Get the Node.js release info + let response = client + .get("https://nodejs.org/dist/index.json") + .header("User-Agent", "anki-build-updater") + .send()?; + + let releases: Vec = response.json()?; + + // Find the latest LTS release + let latest = releases + .iter() + .find(|release| { + // LTS releases have a non-false "lts" field + release["lts"].as_str().is_some() && release["lts"] != false + }) + .ok_or("No LTS releases found")?; + + let version = latest["version"] + .as_str() + .ok_or("Version not found")? 
+ .to_string(); + + let files = latest["files"] + .as_array() + .ok_or("Files array not found")? + .iter() + .map(|f| f.as_str().unwrap_or("")) + .collect::>(); + + let lts_name = latest["lts"].as_str().unwrap_or("unknown"); + println!("Found Node.js LTS version: {version} ({lts_name})"); + + // Map platforms to their expected file keys and full filenames + let platform_mapping = vec![ + ( + "linux-x64", + "linux-x64", + format!("node-{version}-linux-x64.tar.xz"), + ), + ( + "linux-arm64", + "linux-arm64", + format!("node-{version}-linux-arm64.tar.xz"), + ), + ( + "darwin-x64", + "osx-x64-tar", + format!("node-{version}-darwin-x64.tar.xz"), + ), + ( + "darwin-arm64", + "osx-arm64-tar", + format!("node-{version}-darwin-arm64.tar.xz"), + ), + ( + "win-x64", + "win-x64-zip", + format!("node-{version}-win-x64.zip"), + ), + ( + "win-arm64", + "win-arm64-zip", + format!("node-{version}-win-arm64.zip"), + ), + ]; + + let mut node_files = Vec::new(); + + for (platform, file_key, filename) in platform_mapping { + // Check if this file exists in the release + if files.contains(&file_key) { + let url = format!("https://nodejs.org/dist/{version}/{filename}"); + node_files.push(NodeFile { + filename: filename.clone(), + url, + }); + println!("Found file for {platform}: {filename} (key: {file_key})"); + } else { + return Err( + format!("File not found for {platform} (key: {file_key}): {filename}").into(), + ); + } + } + + Ok(NodeRelease { + version, + files: node_files, + }) +} + +fn generate_node_archive_function(release: &NodeRelease) -> Result> { + let client = Client::new(); + + // Fetch the SHASUMS256.txt file once + println!("Fetching SHA256 checksums..."); + let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version); + let shasums_response = client + .get(&shasums_url) + .header("User-Agent", "anki-build-updater") + .send()?; + let shasums_text = shasums_response.text()?; + + // Create a mapping from filename patterns to platform names - using the exact + // patterns we stored in files + let platform_mapping = vec![ + ("linux-x64.tar.xz", "LinuxX64"), + ("linux-arm64.tar.xz", "LinuxArm"), + ("darwin-x64.tar.xz", "MacX64"), + ("darwin-arm64.tar.xz", "MacArm"), + ("win-x64.zip", "WindowsX64"), + ("win-arm64.zip", "WindowsArm"), + ]; + + let mut platform_blocks = Vec::new(); + + for (file_pattern, platform_name) in platform_mapping { + // Find the file that ends with this pattern + if let Some(file) = release + .files + .iter() + .find(|f| f.filename.ends_with(file_pattern)) + { + // Find the SHA256 for this file + let sha256 = shasums_text + .lines() + .find(|line| line.contains(&file.filename)) + .and_then(|line| line.split_whitespace().next()) + .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?; + + println!( + "Found SHA256 for {}: {} => {}", + platform_name, file.filename, sha256 + ); + + let block = format!( + " Platform::{} => OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }},", + platform_name, file.url, sha256 + ); + platform_blocks.push(block); + } else { + return Err(format!( + "File not found for platform {platform_name}: no file ending with {file_pattern}" + ) + .into()); + } + } + + let function = format!( + "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}", + platform_blocks.join("\n") + ); + + Ok(function) +} + +fn update_node_text(new_function: &str) -> Result<(), Box> { + let node_rs_content = read_node_rs()?; + + // Regex to match the entire node_archive function with proper multiline + 
// matching
+    let re = Regex::new(
+        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
+    )?;
+
+    let updated_content = re.replace(&node_rs_content, new_function);
+
+    write_node_rs(&updated_content)?;
+    Ok(())
+}
+
+fn read_node_rs() -> Result<String, Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    Ok(fs::read_to_string(path)?)
+}
+
+fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    fs::write(path, content)?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_regex_replacement() {
+        let sample_content = r#"Some other code
+pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
+            sha256: "old_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
+            sha256: "old_hash",
+        },
+    }
+}
+
+More code here"#;
+
+        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
+            sha256: "new_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
+            sha256: "new_hash",
+        },
+    }
+}"#;
+
+        let re = Regex::new(
+            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
+        ).unwrap();
+
+        let result = re.replace(sample_content, new_function);
+        assert!(result.contains("v21.0.0"));
+        assert!(result.contains("new_hash"));
+        assert!(!result.contains("old_hash"));
+        assert!(result.contains("Some other code"));
+        assert!(result.contains("More code here"));
+    }
+}
diff --git a/build/ninja_gen/src/bin/update_protoc.rs b/build/ninja_gen/src/bin/update_protoc.rs
new file mode 100644
index 000000000..3a8f06b8b
--- /dev/null
+++ b/build/ninja_gen/src/bin/update_protoc.rs
@@ -0,0 +1,125 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+use sha2::Digest;
+use sha2::Sha256;
+
+fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    println!("Fetching latest protoc release info from GitHub...");
+    // Fetch latest release info
+    let response = client
+        .get("https://api.github.com/repos/protocolbuffers/protobuf/releases/latest")
+        .header("User-Agent", "Anki-Build-Script")
+        .send()?;
+
+    let release_info: Value = response.json()?;
+    let assets = release_info["assets"]
+        .as_array()
+        .expect("assets should be an array");
+
+    // Map platform names to their corresponding asset patterns
+    let platform_patterns = [
+        ("LinuxX64", "linux-x86_64"),
+        ("LinuxArm", "linux-aarch_64"),
+        ("MacX64", "osx-universal_binary"), // Mac uses universal binary for both
+        ("MacArm", "osx-universal_binary"),
+        ("WindowsX64", "win64"), // Windows uses x86 binary for both archs
+        ("WindowsArm", "win64"),
+    ];
+
+    let mut match_blocks = Vec::new();
+
+    for (platform, pattern) in platform_patterns {
+        // Find the asset matching the platform pattern
+        let asset = assets.iter().find(|asset| {
+            let name = asset["name"].as_str().unwrap_or("");
+            name.starts_with("protoc-") && name.contains(pattern) && name.ends_with(".zip")
+        });
+
+        if asset.is_none() {
+            eprintln!("No asset found for platform {platform} pattern {pattern}");
+            continue;
+        }
+
+        let asset = asset.unwrap();
+        let download_url = asset["browser_download_url"].as_str().unwrap();
+        let asset_name = asset["name"].as_str().unwrap();
+
+        // Download the file and calculate SHA256 locally
+        println!("Downloading and checksumming {asset_name} for {platform}...");
+        let response = client
+            .get(download_url)
+            .header("User-Agent", "Anki-Build-Script")
+            .send()?;
+
+        let bytes = response.bytes()?;
+        let mut hasher = Sha256::new();
+        hasher.update(&bytes);
+        let sha256 = format!("{:x}", hasher.finalize());
+
+        // Handle platform-specific match patterns
+        let match_pattern = match platform {
+            "MacX64" => "Platform::MacX64 | Platform::MacArm",
+            "MacArm" => continue, // Skip MacArm since it's handled with MacX64
+            "WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
+            "WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
+            _ => &format!("Platform::{platform}"),
+        };
+
+        match_blocks.push(format!(
+            "        {match_pattern} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
+        ));
+    }
+
+    Ok(format!(
+        "pub fn protoc_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
+        match_blocks.join(",\n")
+    ))
+}
+
+fn read_protobuf_rs() -> Result<String, Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
+    println!("Reading {}", path.display());
+    let content = fs::read_to_string(path)?;
+    Ok(content)
+}
+
+fn update_protoc_text(old_text: &str, new_protoc_text: &str) -> Result<String, Box<dyn Error>> {
+    let re =
+        Regex::new(r"(?ms)^pub fn protoc_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\}")
+            .unwrap();
+    if !re.is_match(old_text) {
+        return Err("Could not find protoc_archive function block to replace".into());
+    }
+    let new_content = re.replace(old_text, new_protoc_text).to_string();
+    println!("Original lines: {}", old_text.lines().count());
+    println!("Updated lines: {}", new_content.lines().count());
+    Ok(new_content)
+}
+
+fn write_protobuf_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
+    println!("Writing to {}", path.display());
+    fs::write(path, content)?;
+    Ok(())
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let new_protoc_archive = fetch_protoc_release_info()?;
+    let content = read_protobuf_rs()?;
+    let updated_content = update_protoc_text(&content, &new_protoc_archive)?;
+    write_protobuf_rs(&updated_content)?;
+    println!("Successfully updated protoc_archive function in protobuf.rs");
+    Ok(())
+}
diff --git a/build/ninja_gen/src/bin/update_uv.rs b/build/ninja_gen/src/bin/update_uv.rs
new file mode 100644
index 000000000..5a5d2d253
--- /dev/null
+++ b/build/ninja_gen/src/bin/update_uv.rs
@@ -0,0 +1,140 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+
+fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    println!("Fetching latest uv release info from GitHub...");
+    // Fetch latest release info
+    let response = client
+        .get("https://api.github.com/repos/astral-sh/uv/releases/latest")
+        .header("User-Agent", "Anki-Build-Script")
+        .send()?;
+
+    let release_info: Value = response.json()?;
+    let assets = release_info["assets"]
+        .as_array()
+        .expect("assets should be an array");
+
+    // Map platform names to their corresponding asset patterns
+    let platform_patterns = [
+        ("LinuxX64", "x86_64-unknown-linux-gnu"),
+        ("LinuxArm", "aarch64-unknown-linux-gnu"),
+        ("MacX64", "x86_64-apple-darwin"),
+        ("MacArm", "aarch64-apple-darwin"),
+        ("WindowsX64", "x86_64-pc-windows-msvc"),
+        ("WindowsArm", "aarch64-pc-windows-msvc"),
+    ];
+
+    let mut match_blocks = Vec::new();
+
+    for (platform, pattern) in platform_patterns {
+        // Find the asset matching the platform pattern (the binary)
+        let asset = assets.iter().find(|asset| {
+            let name = asset["name"].as_str().unwrap_or("");
+            name.contains(pattern) && (name.ends_with(".tar.gz") || name.ends_with(".zip"))
+        });
+        if asset.is_none() {
+            eprintln!("No asset found for platform {platform} pattern {pattern}");
+            continue;
+        }
+        let asset = asset.unwrap();
+        let download_url = asset["browser_download_url"].as_str().unwrap();
+        let asset_name = asset["name"].as_str().unwrap();
+
+        // Find the corresponding .sha256 or .sha256sum asset
+        let sha_asset = assets.iter().find(|a| {
+            let name = a["name"].as_str().unwrap_or("");
+            name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
+        });
+        if sha_asset.is_none() {
+            eprintln!("No sha256 asset found for {asset_name}");
+            continue;
+        }
+        let sha_asset = sha_asset.unwrap();
+        let sha_url = sha_asset["browser_download_url"].as_str().unwrap();
+        println!("Fetching SHA256 for {platform}...");
+        let sha_text = client
+            .get(sha_url)
+            .header("User-Agent", "Anki-Build-Script")
+            .send()?
+            .text()?;
+        // The sha file is usually of the form: "<hash> <filename>"
+        let sha256 = sha_text.split_whitespace().next().unwrap_or("");
+
+        match_blocks.push(format!(
+            "        Platform::{platform} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
+        ));
+    }
+
+    Ok(format!(
+        "pub fn uv_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}",
+        match_blocks.join(",\n")
+    ))
+}
+
+fn read_python_rs() -> Result<String, Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/python.rs");
+    println!("Reading {}", path.display());
+    let content = fs::read_to_string(path)?;
+    Ok(content)
+}
+
+fn update_uv_text(old_text: &str, new_uv_text: &str) -> Result<String, Box<dyn Error>> {
+    let re = Regex::new(r"(?ms)^pub fn uv_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}\s*\n\s*\}").unwrap();
+    if !re.is_match(old_text) {
+        return Err("Could not find uv_archive function block to replace".into());
+    }
+    let new_content = re.replace(old_text, new_uv_text).to_string();
+    println!("Original lines: {}", old_text.lines().count());
+    println!("Updated lines: {}", new_content.lines().count());
+    Ok(new_content)
+}
+
+fn write_python_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/python.rs");
+    println!("Writing to {}", path.display());
+    fs::write(path, content)?;
+    Ok(())
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let new_uv_archive = fetch_uv_release_info()?;
+    let content = read_python_rs()?;
+    let updated_content = update_uv_text(&content, &new_uv_archive)?;
+    write_python_rs(&updated_content)?;
+    println!("Successfully updated uv_archive function in python.rs");
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_update_uv_text_with_actual_file() {
+        let content = fs::read_to_string("src/python.rs").unwrap();
+        let original_lines = content.lines().count();
+
+        const EXPECTED_LINES_REMOVED: usize = 38;
+
+        let updated = update_uv_text(&content, "").unwrap();
+        let updated_lines = updated.lines().count();
+
+        assert_eq!(
+            updated_lines,
+            original_lines - EXPECTED_LINES_REMOVED,
+            "Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
+        );
+    }
+}
diff --git a/build/ninja_gen/src/build.rs b/build/ninja_gen/src/build.rs
index df8ec82fb..ed416b000 100644
--- a/build/ninja_gen/src/build.rs
+++ b/build/ninja_gen/src/build.rs
@@ -300,7 +300,7 @@ impl BuildStatement<'_> {
         writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
 
         for (key, value) in self.variables.iter().sorted() {
-            writeln!(buf, " {key} = {}", value).unwrap();
+            writeln!(buf, " {key} = {value}").unwrap();
         }
         writeln!(buf).unwrap();
 
@@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> {
         let outputs = outputs.into_iter().map(|v| {
             let v = v.as_ref();
             let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
-                format!("$builddir/{}", v)
+                format!("$builddir/{v}")
             } else {
                 v.to_owned()
             };
diff --git a/build/ninja_gen/src/cargo.rs b/build/ninja_gen/src/cargo.rs
index 645203170..2a3397704 100644
--- a/build/ninja_gen/src/cargo.rs
+++ b/build/ninja_gen/src/cargo.rs
@@ -162,7 +162,7 @@ impl BuildAction for CargoTest {
             "cargo-nextest",
             CargoInstall {
                 binary_name: "cargo-nextest",
-                args: "cargo-nextest --version 0.9.57 --locked",
+                args: "cargo-nextest --version
0.9.99 --locked --no-default-features --features default-no-update", }, )?; setup_flags(build) diff --git a/build/ninja_gen/src/node.rs b/build/ninja_gen/src/node.rs index dac056c10..b7b66225b 100644 --- a/build/ninja_gen/src/node.rs +++ b/build/ninja_gen/src/node.rs @@ -19,24 +19,28 @@ use crate::input::BuildInput; pub fn node_archive(platform: Platform) -> OnlineArchive { match platform { Platform::LinuxX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz", - sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz", + sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12", }, Platform::LinuxArm => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz", - sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz", + sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18", }, Platform::MacX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz", - sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz", + sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a", }, Platform::MacArm => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz", - sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz", + sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914", }, Platform::WindowsX64 => OnlineArchive { - url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip", - sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5", + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip", + sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85", + }, + Platform::WindowsArm => OnlineArchive { + url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip", + sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621", }, } } diff --git a/build/ninja_gen/src/protobuf.rs b/build/ninja_gen/src/protobuf.rs index 2643c2ab1..1198f653d 100644 --- a/build/ninja_gen/src/protobuf.rs +++ b/build/ninja_gen/src/protobuf.rs @@ -21,26 +21,26 @@ pub fn protoc_archive(platform: Platform) -> OnlineArchive { match platform { Platform::LinuxX64 => { OnlineArchive { - url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip", - sha256: "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a", + url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-x86_64.zip", + sha256: "96553041f1a91ea0efee963cb16f462f5985b4d65365f3907414c360044d8065", } - } + }, Platform::LinuxArm => { OnlineArchive { - url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip", - sha256: "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad", + url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-aarch_64.zip", + sha256: "6c554de11cea04c56ebf8e45b54434019b1cd85223d4bbd25c282425e306ecc2", } - } + }, Platform::MacX64 | Platform::MacArm => { OnlineArchive { - url: 
"https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip", - sha256: "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c", + url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-osx-universal_binary.zip", + sha256: "99ea004549c139f46da5638187a85bbe422d78939be0fa01af1aa8ab672e395f", } - } - Platform::WindowsX64 => { + }, + Platform::WindowsX64 | Platform::WindowsArm => { OnlineArchive { - url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip", - sha256: "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec", + url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-win64.zip", + sha256: "70381b116ab0d71cb6a5177d9b17c7c13415866603a0fd40d513dafe32d56c35", } } } @@ -67,7 +67,7 @@ fn clang_format_archive(platform: Platform) -> OnlineArchive { sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e", } } - Platform::WindowsX64 => { + Platform::WindowsX64 | Platform::WindowsArm=> { OnlineArchive { url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip", sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc", diff --git a/build/ninja_gen/src/python.rs b/build/ninja_gen/src/python.rs index 3a8931697..541d6c96e 100644 --- a/build/ninja_gen/src/python.rs +++ b/build/ninja_gen/src/python.rs @@ -9,6 +9,7 @@ use maplit::hashmap; use crate::action::BuildAction; use crate::archives::download_and_extract; +use crate::archives::with_exe; use crate::archives::OnlineArchive; use crate::archives::Platform; use crate::hash::simple_hash; @@ -16,82 +17,113 @@ use crate::input::BuildInput; use crate::inputs; use crate::Build; -/// When updating this, pyoxidizer.bzl needs updating too, but it uses different -/// files. -pub fn python_archive(platform: Platform) -> OnlineArchive { +// To update, run 'cargo run --bin update_uv'. +// You'll need to do this when bumping Python versions, as uv bakes in +// the latest known version. +// When updating Python version, make sure to update version tag in BuildWheel +// too. 
+pub fn uv_archive(platform: Platform) -> OnlineArchive { match platform { Platform::LinuxX64 => { OnlineArchive { - url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-install_only.tar.gz", - sha256: "9426bca501ae0a257392b10719e2e20ff5fa5e22a3ce4599d6ad0b3139f86417", + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz", + sha256: "909278eb197c5ed0e9b5f16317d1255270d1f9ea4196e7179ce934d48c4c2545", } - } + }, Platform::LinuxArm => { OnlineArchive { - url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-unknown-linux-gnu-install_only.tar.gz", - sha256: "7d19e1ecd6e582423f7c74a0c67491eaa982ce9d5c5f35f0e4289f83127abcb8", + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-unknown-linux-gnu.tar.gz", + sha256: "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f", } - } + }, Platform::MacX64 => { OnlineArchive { - url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-install_only.tar.gz", - sha256: "5a0bf895a5cb08d6d008140abb41bb2c8cd638a665273f7d8eb258bc89de439b", + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-apple-darwin.tar.gz", + sha256: "d785753ac092e25316180626aa691c5dfe1fb075290457ba4fdb72c7c5661321", } - } + }, Platform::MacArm => { OnlineArchive { - url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-install_only.tar.gz", - sha256: "bf0cd90204a2cc6da48cae1e4b32f48c9f7031fbe1238c5972104ccb0155d368", + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz", + sha256: "721f532b73171586574298d4311a91d5ea2c802ef4db3ebafc434239330090c6", } - } + }, Platform::WindowsX64 => { OnlineArchive { - url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-install_only.tar.gz", - sha256: "8f0544cd593984f7ecb90c685931249c579302124b9821064873f3a14ed07005", + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-pc-windows-msvc.zip", + sha256: "e199b10bef1a7cc540014483e7f60f825a174988f41020e9d2a6b01bd60f0669", + } + }, + Platform::WindowsArm => { + OnlineArchive { + url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-pc-windows-msvc.zip", + sha256: "bb40708ad549ad6a12209cb139dd751bf0ede41deb679ce7513ce197bd9ef234", } } } } -/// Returns the Python binary, which can be used to create venvs. -/// Downloads if missing. -pub fn setup_python(build: &mut Build) -> Result<()> { - // if changing this, make sure you remove out/pyenv - let python_binary = match env::var("PYTHON_BINARY") { +pub fn setup_uv(build: &mut Build, platform: Platform) -> Result<()> { + let uv_binary = match env::var("UV_BINARY") { Ok(path) => { assert!( Utf8Path::new(&path).is_absolute(), - "PYTHON_BINARY must be absolute" + "UV_BINARY must be absolute" ); path.into() } Err(_) => { download_and_extract( build, - "python", - python_archive(build.host_platform), + "uv", + uv_archive(platform), hashmap! 
{ "bin" => [
-                    if cfg!(windows) { "python.exe" } else { "bin/python3"}
-                ] },
+                    with_exe("uv")
+                ] },
             )?;
-            inputs![":extract:python:bin"]
+            inputs![":extract:uv:bin"]
         }
     };
-    build.add_dependency("python_binary", python_binary);
+    build.add_dependency("uv_binary", uv_binary);
+
+    // Our macOS packaging needs access to the x86 binary on ARM.
+    if cfg!(target_arch = "aarch64") {
+        download_and_extract(
+            build,
+            "uv_mac_x86",
+            uv_archive(Platform::MacX64),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }
+    // Our Linux packaging needs access to the ARM binary on x86
+    if cfg!(target_arch = "x86_64") {
+        download_and_extract(
+            build,
+            "uv_lin_arm",
+            uv_archive(Platform::LinuxArm),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }
+
     Ok(())
 }
 
 pub struct PythonEnvironment {
-    pub folder: &'static str,
-    pub base_requirements_txt: BuildInput,
-    pub requirements_txt: BuildInput,
+    pub deps: BuildInput,
+    // todo: rename
+    pub venv_folder: &'static str,
+    pub extra_args: &'static str,
     pub extra_binary_exports: &'static [&'static str],
 }
 
 impl BuildAction for PythonEnvironment {
     fn command(&self) -> &str {
         if env::var("OFFLINE_BUILD").is_err() {
-            "$runner pyenv $python_binary $builddir/$pyenv_folder $system_pkgs $base_requirements $requirements"
+            "$runner pyenv $uv_binary $builddir/$pyenv_folder -- $extra_args"
         } else {
             "echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
         }
@@ -99,7 +131,7 @@ impl BuildAction for PythonEnvironment {
     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         let bin_path = |binary: &str| -> Vec<String> {
-            let folder = self.folder;
+            let folder = self.venv_folder;
             let path = if cfg!(windows) {
                 format!("{folder}/scripts/{binary}.exe")
             } else {
@@ -108,17 +140,24 @@
             vec![path]
         };
 
+        build.add_inputs("", &self.deps);
+        build.add_variable("pyenv_folder", self.venv_folder);
         if env::var("OFFLINE_BUILD").is_err() {
-            build.add_inputs("python_binary", inputs![":python_binary"]);
-            build.add_variable("pyenv_folder", self.folder);
-            build.add_inputs("base_requirements", &self.base_requirements_txt);
-            build.add_inputs("requirements", &self.requirements_txt);
-            build.add_outputs_ext("pip", bin_path("pip"), true);
+            build.add_inputs("uv_binary", inputs![":uv_binary"]);
+
+            // Add --python flag to extra_args if PYTHON_BINARY is set
+            let mut args = self.extra_args.to_string();
+            if let Ok(python_binary) = env::var("PYTHON_BINARY") {
+                args = format!("--python {python_binary} {args}");
+            }
+            build.add_variable("extra_args", args);
         }
+        build.add_outputs_ext("bin", bin_path("python"), true);
         for binary in self.extra_binary_exports {
             build.add_outputs_ext(*binary, bin_path(binary), true);
         }
+        build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
     }
 
     fn check_output_timestamps(&self) -> bool {
@@ -154,31 +193,19 @@ impl BuildAction for PythonTypecheck {
 struct PythonFormat<'a> {
     pub inputs: &'a BuildInput,
     pub check_only: bool,
-    pub isort_ini: &'a BuildInput,
 }
 
 impl BuildAction for PythonFormat<'_> {
     fn command(&self) -> &str {
-        "$black -t py39 -q $check --color $in && $
-            $isort --color --settings-path $isort_ini $check $in"
+        "$ruff format $mode $in && $ruff check --select I --fix $in"
     }
 
     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         build.add_inputs("in", self.inputs);
-        build.add_inputs("black", inputs![":pyenv:black"]);
-        build.add_inputs("isort", inputs![":pyenv:isort"]);
+        build.add_inputs("ruff", inputs![":pyenv:ruff"]);
 
         let hash = simple_hash(self.inputs);
-
build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}"); - build.add_inputs("isort_ini", self.isort_ini); - build.add_variable( - "check", - if self.check_only { - "--diff --check" - } else { - "" - }, - ); + build.add_variable("mode", if self.check_only { "--check" } else { "" }); build.add_output_stamp(format!( "tests/python_format.{}.{hash}", @@ -188,13 +215,11 @@ impl BuildAction for PythonFormat<'_> { } pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> { - let isort_ini = &inputs![".isort.cfg"]; build.add_action( format!("check:format:python:{group}"), PythonFormat { inputs: &inputs, check_only: true, - isort_ini, }, )?; @@ -203,34 +228,39 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Resu PythonFormat { inputs: &inputs, check_only: false, - isort_ini, }, )?; Ok(()) } -pub struct PythonLint { +pub struct RuffCheck { pub folders: &'static [&'static str], - pub pylint_ini: BuildInput, pub deps: BuildInput, + pub check_only: bool, } -impl BuildAction for PythonLint { +impl BuildAction for RuffCheck { fn command(&self) -> &str { - "$pylint --rcfile $pylint_ini -sn -j $cpus $folders" + "$ruff check $folders $mode" } fn files(&mut self, build: &mut impl crate::build::FilesHandle) { build.add_inputs("", &self.deps); - build.add_inputs("pylint", inputs![":pyenv:pylint"]); - build.add_inputs("pylint_ini", &self.pylint_ini); + build.add_inputs("", inputs![".ruff.toml"]); + build.add_inputs("ruff", inputs![":pyenv:ruff"]); build.add_variable("folders", self.folders.join(" ")); - // On a 16 core system, values above 10 do not improve wall clock time, - // but waste extra cores that could be working on other tests. - build.add_variable("cpus", num_cpus::get().min(10).to_string()); + build.add_variable( + "mode", + if self.check_only { + "" + } else { + "--fix --unsafe-fixes" + }, + ); let hash = simple_hash(&self.deps); - build.add_output_stamp(format!("tests/python_lint.{hash}")); + let kind = if self.check_only { "check" } else { "fix" }; + build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}")); } } diff --git a/build/ninja_gen/src/render.rs b/build/ninja_gen/src/render.rs index a9a32cf84..dde307e73 100644 --- a/build/ninja_gen/src/render.rs +++ b/build/ninja_gen/src/render.rs @@ -30,12 +30,12 @@ impl Build { ) .unwrap(); for (key, value) in &self.variables { - writeln!(&mut buf, "{} = {}", key, value).unwrap(); + writeln!(&mut buf, "{key} = {value}").unwrap(); } buf.push('\n'); for (key, value) in &self.pools { - writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap(); + writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap(); } buf.push('\n'); diff --git a/build/runner/Cargo.toml b/build/runner/Cargo.toml index 54722f01d..1fffe7050 100644 --- a/build/runner/Cargo.toml +++ b/build/runner/Cargo.toml @@ -15,7 +15,6 @@ camino.workspace = true clap.workspace = true flate2.workspace = true junction.workspace = true -reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] } sha2.workspace = true tar.workspace = true termcolor.workspace = true @@ -24,3 +23,9 @@ which.workspace = true xz2.workspace = true zip.workspace = true zstd.workspace = true + +[target.'cfg(windows)'.dependencies] +reqwest = { workspace = true, features = ["native-tls"] } + +[target.'cfg(not(windows))'.dependencies] +reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] } diff --git a/build/runner/src/archive.rs b/build/runner/src/archive.rs index 8a78dd515..932b924e1 
100644 --- a/build/runner/src/archive.rs +++ b/build/runner/src/archive.rs @@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String { let mut digest = sha2::Sha256::new(); digest.update(data); let result = digest.finalize(); - format!("{:x}", result) + format!("{result:x}") } enum CompressionKind { diff --git a/build/runner/src/build.rs b/build/runner/src/build.rs index 5e3042aba..107be9783 100644 --- a/build/runner/src/build.rs +++ b/build/runner/src/build.rs @@ -67,7 +67,10 @@ pub fn run_build(args: BuildArgs) { "MYPY_CACHE_DIR", build_root.join("tests").join("mypy").into_string(), ) - .env("PYTHONPYCACHEPREFIX", build_root.join("pycache")) + .env( + "PYTHONPYCACHEPREFIX", + std::path::absolute(build_root.join("pycache")).unwrap(), + ) // commands will not show colors by default, as we do not provide a tty .env("FORCE_COLOR", "1") .env("MYPY_FORCE_COLOR", "1") @@ -135,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf { true }; if create { - println!("Switching build root to {}", new_target); + println!("Switching build root to {new_target}"); std::os::unix::fs::symlink(new_target, build_root).unwrap(); } } diff --git a/build/runner/src/bundle/artifacts.rs b/build/runner/src/bundle/artifacts.rs deleted file mode 100644 index ec5506717..000000000 --- a/build/runner/src/bundle/artifacts.rs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; -use std::fs; -use std::process::Command; - -use camino::Utf8PathBuf; -use clap::Args; - -use crate::run::run_command; - -#[derive(Args, Debug)] -pub struct BuildArtifactsArgs { - bundle_root: Utf8PathBuf, - pyoxidizer_bin: String, -} - -pub fn build_artifacts(args: BuildArtifactsArgs) { - // build.rs doesn't declare inputs from venv, so we need to force a rebuild to - // ensure changes to our libs/the venv get included - let artifacts = args.bundle_root.join("artifacts"); - if artifacts.exists() { - fs::remove_dir_all(&artifacts).unwrap(); - } - let bundle_root = args.bundle_root.canonicalize_utf8().unwrap(); - let build_folder = bundle_root.join("build"); - if build_folder.exists() { - fs::remove_dir_all(&build_folder).unwrap(); - } - - run_command( - Command::new(&args.pyoxidizer_bin) - .args([ - "--system-rust", - "run-build-script", - "qt/bundle/build.rs", - "--var", - "venv", - "out/bundle/pyenv", - "--var", - "build", - build_folder.as_str(), - ]) - .env("CARGO_MANIFEST_DIR", "qt/bundle") - .env("CARGO_TARGET_DIR", "out/bundle/rust") - .env("PROFILE", "release") - .env("OUT_DIR", &artifacts) - .env("TARGET", env!("TARGET")) - .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk") - .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target()) - .env("CARGO_BUILD_TARGET", env!("TARGET")), - ); -} - -pub fn macos_deployment_target() -> &'static str { - if env!("TARGET") == "x86_64-apple-darwin" { - "10.13.4" - } else { - "11" - } -} diff --git a/build/runner/src/bundle/binary.rs b/build/runner/src/bundle/binary.rs deleted file mode 100644 index e9119220a..000000000 --- a/build/runner/src/bundle/binary.rs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::process::Command; - -use anki_process::CommandExt; -use camino::Utf8Path; -use camino::Utf8PathBuf; - -use super::artifacts::macos_deployment_target; -use crate::run::run_command; - -pub fn 
build_bundle_binary() { - let mut features = String::from("build-mode-prebuilt-artifacts"); - if cfg!(target_os = "linux") || cfg!(target_os = "macos") { - features.push_str(",global-allocator-jemalloc,allocator-jemalloc"); - } - - let mut command = Command::new("cargo"); - command - .args([ - "build", - "--manifest-path=qt/bundle/Cargo.toml", - "--target-dir=out/bundle/rust", - "--release", - "--no-default-features", - ]) - .arg(format!("--features={features}")) - .env( - "DEFAULT_PYTHON_CONFIG_RS", - // included in main.rs, so relative to qt/bundle/src - "../../../out/bundle/artifacts/", - ) - .env( - "PYO3_CONFIG_FILE", - Utf8Path::new("out/bundle/artifacts/pyo3-build-config-file.txt") - .canonicalize_utf8() - .unwrap(), - ) - .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target()) - .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk") - .env("CARGO_BUILD_TARGET", env!("TARGET")); - if env!("TARGET") == "x86_64-apple-darwin" { - let xcode_path = Command::run_with_output(["xcode-select", "-p"]).unwrap(); - let ld_classic = Utf8PathBuf::from(xcode_path.stdout.trim()) - .join("Toolchains/XcodeDefault.xctoolchain/usr/bin/ld-classic"); - if ld_classic.exists() { - // work around XCode 15's default linker not supporting macOS 10.15-12. - command.env("RUSTFLAGS", format!("-Clink-arg=-fuse-ld={ld_classic}")); - } - } - run_command(&mut command); -} diff --git a/build/runner/src/bundle/folder.rs b/build/runner/src/bundle/folder.rs deleted file mode 100644 index cdbfd21e8..000000000 --- a/build/runner/src/bundle/folder.rs +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; -use std::fs; -use std::process::Command; - -use camino::Utf8Path; -use camino::Utf8PathBuf; -use clap::Args; -use clap::ValueEnum; - -use crate::paths::absolute_msys_path; -use crate::paths::unix_path; -use crate::run::run_command; - -#[derive(Clone, Copy, ValueEnum, Debug)] -enum DistKind { - Standard, - Alternate, -} - -#[derive(Args, Debug)] -pub struct BuildDistFolderArgs { - kind: DistKind, - folder_root: Utf8PathBuf, -} - -pub fn build_dist_folder(args: BuildDistFolderArgs) { - let BuildDistFolderArgs { kind, folder_root } = args; - fs::create_dir_all(&folder_root).unwrap(); - // Start with Qt, as it's the largest, and we use --delete to ensure there are - // no stale files in lib/. Skipped on macOS as Qt is handled later. 
- if !cfg!(target_os = "macos") { - copy_qt_from_venv(kind, &folder_root); - } - clean_top_level_files(&folder_root); - copy_binary_and_pylibs(&folder_root); - if cfg!(target_os = "linux") { - copy_linux_extras(kind, &folder_root); - } else if cfg!(windows) { - copy_windows_extras(&folder_root); - } - fs::write(folder_root.with_extension("stamp"), b"").unwrap(); -} - -fn copy_qt_from_venv(kind: DistKind, folder_root: &Utf8Path) { - let python39 = if cfg!(windows) { "" } else { "python3.9/" }; - let qt_root = match kind { - DistKind::Standard => { - folder_root.join(format!("../pyenv/lib/{python39}site-packages/PyQt6")) - } - DistKind::Alternate => { - folder_root.join(format!("../pyenv-qt5/lib/{python39}site-packages/PyQt5")) - } - }; - let src_path = absolute_msys_path(&qt_root); - let lib_path = folder_root.join("lib"); - fs::create_dir_all(&lib_path).unwrap(); - let dst_path = with_slash(absolute_msys_path(&lib_path)); - run_command(Command::new("rsync").args([ - "-a", - "--delete", - "--exclude-from", - "qt/bundle/qt.exclude", - &src_path, - &dst_path, - ])); -} - -fn copy_linux_extras(kind: DistKind, folder_root: &Utf8Path) { - // add README, installer, etc - run_command(Command::new("rsync").args(["-a", "qt/bundle/lin/", &with_slash(folder_root)])); - - // add extra IME plugins from download - let lib_path = folder_root.join("lib"); - let src_path = folder_root - .join("../../extracted/linux_qt_plugins") - .join(match kind { - DistKind::Standard => "qt6", - DistKind::Alternate => "qt5", - }); - let dst_path = lib_path.join(match kind { - DistKind::Standard => "PyQt6/Qt6/plugins", - DistKind::Alternate => "PyQt5/Qt5/plugins", - }); - run_command(Command::new("rsync").args(["-a", &with_slash(src_path), &with_slash(dst_path)])); -} - -fn copy_windows_extras(folder_root: &Utf8Path) { - run_command(Command::new("rsync").args([ - "-a", - "out/extracted/win_amd64_audio/", - &with_slash(folder_root), - ])); -} - -fn clean_top_level_files(folder_root: &Utf8Path) { - let mut to_remove = vec![]; - for entry in fs::read_dir(folder_root).unwrap() { - let entry = entry.unwrap(); - if entry.file_name() == "lib" { - continue; - } else { - to_remove.push(entry.path()); - } - } - for path in to_remove { - if path.is_dir() { - fs::remove_dir_all(path).unwrap() - } else { - fs::remove_file(path).unwrap() - } - } -} - -fn with_slash

<P>(path: P) -> String
-where
-    P: AsRef<str>,
-{
-    format!("{}/", path.as_ref())
-}
-
-fn copy_binary_and_pylibs(folder_root: &Utf8Path) {
-    let binary = folder_root
-        .join("../rust")
-        .join(env!("TARGET"))
-        .join("release")
-        .join(if cfg!(windows) { "anki.exe" } else { "anki" });
-    let extra_files = folder_root
-        .join("../build")
-        .join(env!("TARGET"))
-        .join("release/resources/extra_files");
-    run_command(Command::new("rsync").args([
-        "-a",
-        "--exclude",
-        "PyQt6",
-        // misleading, as it misses the GPL PyQt, and our Rust/JS
-        // dependencies
-        "--exclude",
-        "COPYING.txt",
-        &unix_path(&binary),
-        &with_slash(unix_path(&extra_files)),
-        &with_slash(unix_path(folder_root)),
-    ]));
-    let google_py = if cfg!(windows) {
-        folder_root.join("../pyenv/lib/site-packages/google")
-    } else {
-        folder_root.join("../pyenv/lib/python3.9/site-packages/google")
-    };
-    run_command(Command::new("rsync").args([
-        "-a",
-        &unix_path(&google_py),
-        &with_slash(unix_path(&folder_root.join("lib"))),
-    ]));
-}
diff --git a/build/runner/src/bundle/mod.rs b/build/runner/src/bundle/mod.rs
deleted file mode 100644
index 30a3608ab..000000000
--- a/build/runner/src/bundle/mod.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-pub mod artifacts;
-pub mod binary;
-pub mod folder;
diff --git a/build/runner/src/main.rs b/build/runner/src/main.rs
index 8fdf8f06f..41cc1fa2e 100644
--- a/build/runner/src/main.rs
+++ b/build/runner/src/main.rs
@@ -7,7 +7,6 @@
 mod archive;
 mod build;
-mod bundle;
 mod paths;
 mod pyenv;
 mod rsync;
@@ -19,11 +18,6 @@ use archive::archive_command;
 use archive::ArchiveArgs;
 use build::run_build;
 use build::BuildArgs;
-use bundle::artifacts::build_artifacts;
-use bundle::artifacts::BuildArtifactsArgs;
-use bundle::binary::build_bundle_binary;
-use bundle::folder::build_dist_folder;
-use bundle::folder::BuildDistFolderArgs;
 use clap::Parser;
 use clap::Subcommand;
 use pyenv::setup_pyenv;
@@ -48,9 +42,6 @@ enum Command {
     Rsync(RsyncArgs),
     Run(RunArgs),
     Build(BuildArgs),
-    BuildArtifacts(BuildArtifactsArgs),
-    BuildBundleBinary,
-    BuildDistFolder(BuildDistFolderArgs),
     #[clap(subcommand)]
     Archive(ArchiveArgs),
 }
@@ -62,9 +53,6 @@ fn main() -> Result<()> {
         Command::Rsync(args) => rsync_files(args),
         Command::Yarn(args) => setup_yarn(args),
         Command::Build(args) => run_build(args),
-        Command::BuildArtifacts(args) => build_artifacts(args),
-        Command::BuildBundleBinary => build_bundle_binary(),
-        Command::BuildDistFolder(args) => build_dist_folder(args),
         Command::Archive(args) => archive_command(args)?,
     };
     Ok(())
diff --git a/build/runner/src/paths.rs b/build/runner/src/paths.rs
index 2021120cb..c28dde1b9 100644
--- a/build/runner/src/paths.rs
+++ b/build/runner/src/paths.rs
@@ -16,8 +16,3 @@ pub fn absolute_msys_path(path: &Utf8Path) -> String {
     // and \ -> /
     format!("/{drive}/{}", path[7..].replace('\\', "/"))
 }
-
-/// Converts backslashes to forward slashes
-pub fn unix_path(path: &Utf8Path) -> String {
-    path.as_str().replace('\\', "/")
-}
diff --git a/build/runner/src/pyenv.rs b/build/runner/src/pyenv.rs
index 934b88bbc..d64c8fb3f 100644
--- a/build/runner/src/pyenv.rs
+++ b/build/runner/src/pyenv.rs
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use std::fs;
 use std::process::Command;
 
 use camino::Utf8Path;
@@ -10,12 +11,10 @@ use crate::run::run_command;
 
 #[derive(Args)]
 pub struct PyenvArgs {
-    python_bin: String,
+    uv_bin: String,
     pyenv_folder: String,
-    initial_reqs: String,
-    reqs: Vec<String>,
-    #[arg(long, allow_hyphen_values(true))]
-    venv_args: Vec<String>,
+    #[arg(trailing_var_arg = true)]
+    extra_args: Vec<String>,
 }
 
 /// Set up a venv if one doesn't already exist, and then sync packages with
@@ -23,42 +22,23 @@ pub struct PyenvArgs {
 pub fn setup_pyenv(args: PyenvArgs) {
     let pyenv_folder = Utf8Path::new(&args.pyenv_folder);
 
-    let pyenv_bin_folder = pyenv_folder.join(if cfg!(windows) { "scripts" } else { "bin" });
-    let pyenv_python = pyenv_bin_folder.join("python");
-    let pip_sync = pyenv_bin_folder.join("pip-sync");
-
-    // Ensure the venv gets recreated properly if it was created by our uv branch
-    let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
-    if cache_tag.exists() {
-        println!("Cleaning up uv pyenv...");
-        std::fs::remove_dir_all(pyenv_folder).expect("Failed to remove pyenv folder");
-    }
-
-    if !pyenv_python.exists() {
-        run_command(
-            Command::new(&args.python_bin)
-                .args(["-m", "venv"])
-                .args(args.venv_args)
-                .arg(pyenv_folder),
-        );
-
-        if cfg!(windows) {
-            // the first install on Windows throws an error the first time pip is upgraded,
-            // so we install it twice and swallow the first error
-            let _output = Command::new(&pyenv_python)
-                .args(["-m", "pip", "install", "-r", &args.initial_reqs])
-                .output()
-                .unwrap();
+    // On first run, ninja creates an empty bin/ folder which breaks the initial
+    // install. But we don't want to indiscriminately remove the folder, or
+    // macOS Gatekeeper needs to rescan the files each time.
+    if pyenv_folder.exists() {
+        let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
+        if !cache_tag.exists() {
+            fs::remove_dir_all(pyenv_folder).expect("Failed to remove existing pyenv folder");
         }
-
-        run_command(Command::new(pyenv_python).args([
-            "-m",
-            "pip",
-            "install",
-            "-r",
-            &args.initial_reqs,
-        ]));
     }
-    run_command(Command::new(pip_sync).args(&args.reqs));
+
+    run_command(
+        Command::new(args.uv_bin)
+            .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
+            .args(["sync", "--locked"])
+            .args(args.extra_args),
+    );
+
+    // Write empty stamp file
+    fs::write(pyenv_folder.join(".stamp"), "").expect("Failed to write stamp file");
 }
diff --git a/build/runner/src/run.rs b/build/runner/src/run.rs
index 5b60ab80c..fd8877cd9 100644
--- a/build/runner/src/run.rs
+++ b/build/runner/src/run.rs
@@ -1,7 +1,6 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use std::io::ErrorKind;
 use std::process::Command;
 
 use anki_io::create_dir_all;
@@ -44,7 +43,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
     if let Some((k, v)) = s.split_once('=') {
         Ok((k.into(), v.into()))
     } else {
-        Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
+        Err(std::io::Error::other("invalid env var"))
    }
 }
 
@@ -84,7 +83,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {
 pub fn run_command(command: &mut Command) {
     if let Err(err) = command.ensure_success() {
-        println!("{}", err);
+        println!("{err}");
         std::process::exit(1);
     }
 }
diff --git a/cargo/licenses.json b/cargo/licenses.json
index 0333f7e47..f2695ac76 100644
--- a/cargo/licenses.json
+++ b/cargo/licenses.json
@@ -10,7 +10,7 @@
   },
   {
     "name": "adler2",
-    "version": "2.0.0",
+    "version": "2.0.1",
     "authors": "Jonas Schievink |oyvindln ",
     "repository": "https://github.com/oyvindln/adler2",
     "license": "0BSD OR Apache-2.0 OR MIT",
     "license_file": null,
     "description": "A simple clean-room implementation of the Adler-32 checksum"
   },
   {
     "name": "ahash",
-    "version": "0.8.11",
+    "version": "0.8.12",
"authors": "Tom Kaitchuck ", "repository": "https://github.com/tkaitchuck/ahash", "license": "Apache-2.0 OR MIT", @@ -181,7 +181,7 @@ }, { "name": "async-compression", - "version": "0.4.23", + "version": "0.4.24", "authors": "Wim Looman |Allen Bui ", "repository": "https://github.com/Nullus157/async-compression", "license": "Apache-2.0 OR MIT", @@ -244,7 +244,7 @@ }, { "name": "axum", - "version": "0.7.9", + "version": "0.8.4", "authors": null, "repository": "https://github.com/tokio-rs/axum", "license": "MIT", @@ -253,7 +253,7 @@ }, { "name": "axum-client-ip", - "version": "0.6.1", + "version": "1.1.3", "authors": null, "repository": "https://github.com/imbolc/axum-client-ip", "license": "MIT", @@ -262,7 +262,7 @@ }, { "name": "axum-core", - "version": "0.4.5", + "version": "0.5.2", "authors": null, "repository": "https://github.com/tokio-rs/axum", "license": "MIT", @@ -271,7 +271,7 @@ }, { "name": "axum-extra", - "version": "0.9.6", + "version": "0.10.1", "authors": null, "repository": "https://github.com/tokio-rs/axum", "license": "MIT", @@ -280,7 +280,7 @@ }, { "name": "axum-macros", - "version": "0.4.2", + "version": "0.5.0", "authors": null, "repository": "https://github.com/tokio-rs/axum", "license": "MIT", @@ -289,22 +289,13 @@ }, { "name": "backtrace", - "version": "0.3.74", + "version": "0.3.75", "authors": "The Rust Project Developers", "repository": "https://github.com/rust-lang/backtrace-rs", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "A library to acquire a stack trace (backtrace) at runtime in a Rust program." }, - { - "name": "base64", - "version": "0.21.7", - "authors": "Alice Maz |Marshall Pierce ", - "repository": "https://github.com/marshallpierce/rust-base64", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "encodes and decodes base64 as bytes or utf8" - }, { "name": "base64", "version": "0.22.1", @@ -316,7 +307,7 @@ }, { "name": "base64ct", - "version": "1.7.3", + "version": "1.8.0", "authors": "RustCrypto Developers", "repository": "https://github.com/RustCrypto/formats", "license": "Apache-2.0 OR MIT", @@ -361,7 +352,7 @@ }, { "name": "bitflags", - "version": "2.9.0", + "version": "2.9.1", "authors": "The Rust Project Developers", "repository": "https://github.com/bitflags/bitflags", "license": "Apache-2.0 OR MIT", @@ -397,7 +388,7 @@ }, { "name": "bumpalo", - "version": "3.17.0", + "version": "3.18.1", "authors": "Nick Fitzgerald ", "repository": "https://github.com/fitzgen/bumpalo", "license": "Apache-2.0 OR MIT", @@ -550,7 +541,7 @@ }, { "name": "bytemuck", - "version": "1.22.0", + "version": "1.23.1", "authors": "Lokathor ", "repository": "https://github.com/Lokathor/bytemuck", "license": "Apache-2.0 OR MIT OR Zlib", @@ -595,7 +586,7 @@ }, { "name": "camino", - "version": "1.1.9", + "version": "1.1.10", "authors": "Without Boats |Ashley Williams |Steve Klabnik |Rain ", "repository": "https://github.com/camino-rs/camino", "license": "Apache-2.0 OR MIT", @@ -613,7 +604,7 @@ }, { "name": "cc", - "version": "1.2.20", + "version": "1.2.27", "authors": "Alex Crichton ", "repository": "https://github.com/rust-lang/cc-rs", "license": "Apache-2.0 OR MIT", @@ -622,9 +613,9 @@ }, { "name": "cfg-if", - "version": "1.0.0", + "version": "1.0.1", "authors": "Alex Crichton ", - "repository": "https://github.com/alexcrichton/cfg-if", + "repository": "https://github.com/rust-lang/cfg-if", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "A macro to ergonomically define an item depending on a large number of 
#[cfg] parameters. Structured like an if-else chain, the first matching branch is the item that gets emitted." @@ -640,13 +631,22 @@ }, { "name": "chrono", - "version": "0.4.40", + "version": "0.4.41", "authors": null, "repository": "https://github.com/chronotope/chrono", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "Date and time library for Rust" }, + { + "name": "client-ip", + "version": "0.1.1", + "authors": null, + "repository": "https://github.com/imbolc/client-ip", + "license": "MIT", + "license_file": null, + "description": "HTTP client IP address extractors" + }, { "name": "coarsetime", "version": "0.1.36", @@ -694,8 +694,8 @@ }, { "name": "convert_case", - "version": "0.6.0", - "authors": "Rutrum ", + "version": "0.8.0", + "authors": "rutrum ", "repository": "https://github.com/rutrum/convert-case", "license": "MIT", "license_file": null, @@ -712,7 +712,7 @@ }, { "name": "core-foundation", - "version": "0.10.0", + "version": "0.10.1", "authors": "The Servo Project Developers", "repository": "https://github.com/servo/core-foundation-rs", "license": "Apache-2.0 OR MIT", @@ -901,7 +901,7 @@ }, { "name": "cubecl-hip-sys", - "version": "6.4.0", + "version": "6.4.4348200", "authors": "Tracel Technologies Inc.", "repository": "https://github.com/tracel-ai/cubecl-hip/tree/main/crates/cubecl-hip-sys", "license": "Apache-2.0 OR MIT", @@ -1261,7 +1261,7 @@ }, { "name": "errno", - "version": "0.3.11", + "version": "0.3.12", "authors": "Chris Wong |Dan Gohman ", "repository": "https://github.com/lambda-fairy/rust-errno", "license": "Apache-2.0 OR MIT", @@ -1324,7 +1324,7 @@ }, { "name": "flate2", - "version": "1.1.1", + "version": "1.1.2", "authors": "Alex Crichton |Josh Triplett ", "repository": "https://github.com/rust-lang/flate2-rs", "license": "Apache-2.0 OR MIT", @@ -1342,21 +1342,21 @@ }, { "name": "fluent", - "version": "0.16.1", - "authors": "Zibi Braniecki |Staś Małolepszy ", + "version": "0.17.0", + "authors": "Caleb Maclennan |Bruce Mitchener |Staś Małolepszy ", "repository": "https://github.com/projectfluent/fluent-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "A localization system designed to unleash the entire expressive power of natural language translations." + "description": "An umbrella crate exposing the combined features of fluent-rs crates with additional convenience macros for Project Fluent, a localization system designed to unleash the entire expressive power of natural language translations." }, { "name": "fluent-bundle", - "version": "0.15.3", - "authors": "Zibi Braniecki |Staś Małolepszy ", + "version": "0.16.0", + "authors": "Caleb Maclennan |Bruce Mitchener |Staś Małolepszy ", "repository": "https://github.com/projectfluent/fluent-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "A localization system designed to unleash the entire expressive power of natural language translations." + "description": "A low-level implementation of a collection of localization messages for a single locale for Project Fluent, a localization system designed to unleash the entire expressive power of natural language translations." 
}, { "name": "fluent-langneg", @@ -1369,12 +1369,12 @@ }, { "name": "fluent-syntax", - "version": "0.11.1", - "authors": "Zibi Braniecki |Staś Małolepszy ", + "version": "0.12.0", + "authors": "Caleb Maclennan |Bruce Mitchener |Staś Małolepszy ", "repository": "https://github.com/projectfluent/fluent-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "Parser/Serializer tools for Fluent Syntax." + "description": "A low-level parser, AST, and serializer API for the syntax used by Project Fluent, a localization system designed to unleash the entire expressive power of natural language translations." }, { "name": "fnv", @@ -1448,18 +1448,9 @@ "license_file": null, "description": "Parser and serializer for the application/x-www-form-urlencoded syntax, as used by HTML forms." }, - { - "name": "forwarded-header-value", - "version": "0.1.1", - "authors": "James Brown ", - "repository": "https://github.com/EasyPost/rust-forwarded-header-value", - "license": "ISC", - "license_file": null, - "description": "Parser for values from the Forwarded header (RFC 7239)" - }, { "name": "fsrs", - "version": "4.0.0", + "version": "4.1.1", "authors": "Open Spaced Repetition", "repository": "https://github.com/open-spaced-repetition/fsrs-rs", "license": "BSD-3-Clause", @@ -1711,12 +1702,12 @@ }, { "name": "getopts", - "version": "0.2.21", + "version": "0.2.23", "authors": "The Rust Project Developers", "repository": "https://github.com/rust-lang/getopts", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "getopts-like option parsing." + "description": "getopts-like option parsing" }, { "name": "getrandom", @@ -1729,7 +1720,7 @@ }, { "name": "getrandom", - "version": "0.3.2", + "version": "0.3.3", "authors": "The Rand Project Developers", "repository": "https://github.com/rust-random/getrandom", "license": "Apache-2.0 OR MIT", @@ -1810,7 +1801,7 @@ }, { "name": "gpu-descriptor", - "version": "0.3.1", + "version": "0.3.2", "authors": "Zakarum ", "repository": "https://github.com/zakarumych/gpu-descriptor", "license": "Apache-2.0 OR MIT", @@ -1828,7 +1819,7 @@ }, { "name": "h2", - "version": "0.4.9", + "version": "0.4.10", "authors": "Carl Lerche |Sean McArthur ", "repository": "https://github.com/hyperium/h2", "license": "MIT", @@ -1864,7 +1855,7 @@ }, { "name": "hashbrown", - "version": "0.15.2", + "version": "0.15.4", "authors": "Amanieu d'Antras ", "repository": "https://github.com/rust-lang/hashbrown", "license": "Apache-2.0 OR MIT", @@ -1873,7 +1864,7 @@ }, { "name": "hashlink", - "version": "0.8.4", + "version": "0.10.0", "authors": "kyren ", "repository": "https://github.com/kyren/hashlink", "license": "Apache-2.0 OR MIT", @@ -1882,7 +1873,7 @@ }, { "name": "headers", - "version": "0.4.0", + "version": "0.4.1", "authors": "Sean McArthur ", "repository": "https://github.com/hyperium/headers", "license": "MIT", @@ -1909,7 +1900,7 @@ }, { "name": "hermit-abi", - "version": "0.3.9", + "version": "0.5.2", "authors": "Stefan Lankes", "repository": "https://github.com/hermit-os/hermit-rs", "license": "Apache-2.0 OR MIT", @@ -2017,7 +2008,7 @@ }, { "name": "hyper-rustls", - "version": "0.27.5", + "version": "0.27.7", "authors": null, "repository": "https://github.com/rustls/hyper-rustls", "license": "Apache-2.0 OR ISC OR MIT", @@ -2035,7 +2026,7 @@ }, { "name": "hyper-util", - "version": "0.1.11", + "version": "0.1.14", "authors": "Sean McArthur ", "repository": "https://github.com/hyperium/hyper-util", "license": "MIT", @@ -2062,7 +2053,7 @@ }, { "name": 
"icu_collections", - "version": "1.5.0", + "version": "2.0.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2070,35 +2061,17 @@ "description": "Collection of API for use in ICU libraries." }, { - "name": "icu_locid", - "version": "1.5.0", + "name": "icu_locale_core", + "version": "2.0.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", "license_file": null, "description": "API for managing Unicode Language and Locale Identifiers" }, - { - "name": "icu_locid_transform", - "version": "1.5.0", - "authors": "The ICU4X Project Developers", - "repository": "https://github.com/unicode-org/icu4x", - "license": "Unicode-3.0", - "license_file": null, - "description": "API for Unicode Language and Locale Identifiers canonicalization" - }, - { - "name": "icu_locid_transform_data", - "version": "1.5.1", - "authors": "The ICU4X Project Developers", - "repository": "https://github.com/unicode-org/icu4x", - "license": "Unicode-3.0", - "license_file": null, - "description": "Data for the icu_locid_transform crate" - }, { "name": "icu_normalizer", - "version": "1.5.0", + "version": "2.0.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2107,7 +2080,7 @@ }, { "name": "icu_normalizer_data", - "version": "1.5.1", + "version": "2.0.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2116,7 +2089,7 @@ }, { "name": "icu_properties", - "version": "1.5.1", + "version": "2.0.1", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2125,7 +2098,7 @@ }, { "name": "icu_properties_data", - "version": "1.5.1", + "version": "2.0.1", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2134,22 +2107,13 @@ }, { "name": "icu_provider", - "version": "1.5.0", + "version": "2.0.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", "license_file": null, "description": "Trait and struct definitions for the ICU data provider" }, - { - "name": "icu_provider_macros", - "version": "1.5.0", - "authors": "The ICU4X Project Developers", - "repository": "https://github.com/unicode-org/icu4x", - "license": "Unicode-3.0", - "license_file": null, - "description": "Proc macros for ICU data providers" - }, { "name": "id_tree", "version": "1.8.0", @@ -2179,7 +2143,7 @@ }, { "name": "idna_adapter", - "version": "1.2.0", + "version": "1.2.1", "authors": "The rust-url developers", "repository": "https://github.com/hsivonen/idna_adapter", "license": "Apache-2.0 OR MIT", @@ -2206,12 +2170,12 @@ }, { "name": "intl-memoizer", - "version": "0.5.2", - "authors": "Zibi Braniecki |Manish Goregaokar ", + "version": "0.5.3", + "authors": "Caleb Maclennan |Bruce Mitchener |Staś Małolepszy ", "repository": "https://github.com/projectfluent/fluent-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "A memoizer specifically tailored for storing lazy-initialized intl formatters." + "description": "A memoizer specifically tailored for storing lazy-initialized intl formatters for Project Fluent, a localization system designed to unleash the entire expressive power of natural language translations." 
}, { "name": "intl_pluralrules", @@ -2232,13 +2196,13 @@ "description": "Provides types and useful methods for working with IPv4 and IPv6 network addresses, commonly called IP prefixes. The new `IpNet`, `Ipv4Net`, and `Ipv6Net` types build on the existing `IpAddr`, `Ipv4Addr`, and `Ipv6Addr` types already provided in Rust's standard library and align to their design to stay consistent. The module also provides useful traits that extend `Ipv4Addr` and `Ipv6Addr` with methods for `Add`, `Sub`, `BitAnd`, and `BitOr` operations. The module only uses stable feature so it is guaranteed to compile using the stable toolchain." }, { - "name": "itertools", - "version": "0.13.0", - "authors": "bluss", - "repository": "https://github.com/rust-itertools/itertools", + "name": "iri-string", + "version": "0.7.8", + "authors": "YOSHIOKA Takuma ", + "repository": "https://github.com/lo48576/iri-string", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "Extra iterator adaptors, iterator methods, free functions, and macros." + "description": "IRI as string types" }, { "name": "itertools", @@ -2314,7 +2278,7 @@ }, { "name": "libc", - "version": "0.2.172", + "version": "0.2.173", "authors": "The Rust Project Developers", "repository": "https://github.com/rust-lang/libc", "license": "Apache-2.0 OR MIT", @@ -2323,7 +2287,7 @@ }, { "name": "libloading", - "version": "0.8.6", + "version": "0.8.8", "authors": "Simonas Kazlauskas ", "repository": "https://github.com/nagisa/rust_libloading/", "license": "ISC", @@ -2332,7 +2296,7 @@ }, { "name": "libm", - "version": "0.2.13", + "version": "0.2.15", "authors": "Jorge Aparicio ", "repository": "https://github.com/rust-lang/compiler-builtins", "license": "MIT", @@ -2350,13 +2314,22 @@ }, { "name": "libsqlite3-sys", - "version": "0.27.0", + "version": "0.34.0", "authors": "The rusqlite developers", "repository": "https://github.com/rusqlite/rusqlite", "license": "MIT", "license_file": null, "description": "Native bindings to the libsqlite3 library" }, + { + "name": "libz-rs-sys", + "version": "0.5.1", + "authors": null, + "repository": "https://github.com/trifectatechfoundation/zlib-rs", + "license": "Zlib", + "license_file": null, + "description": "A memory-safe zlib implementation written in rust" + }, { "name": "linux-raw-sys", "version": "0.9.4", @@ -2368,7 +2341,7 @@ }, { "name": "litemap", - "version": "0.7.5", + "version": "0.8.0", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -2386,7 +2359,7 @@ }, { "name": "lock_api", - "version": "0.4.12", + "version": "0.4.13", "authors": "Amanieu d'Antras ", "repository": "https://github.com/Amanieu/parking_lot", "license": "Apache-2.0 OR MIT", @@ -2402,6 +2375,15 @@ "license_file": null, "description": "A lightweight logging facade for Rust" }, + { + "name": "lru-slab", + "version": "0.1.2", + "authors": "Benjamin Saunders ", + "repository": "https://github.com/Ralith/lru-slab", + "license": "Apache-2.0 OR MIT OR Zlib", + "license_file": null, + "description": "Pre-allocated storage with constant-time LRU tracking" + }, { "name": "mac", "version": "0.1.1", @@ -2413,7 +2395,7 @@ }, { "name": "macerator", - "version": "0.2.6", + "version": "0.2.8", "authors": "Genna Wingert", "repository": "https://github.com/wingertge/macerator", "license": "Apache-2.0 OR MIT", @@ -2422,7 +2404,7 @@ }, { "name": "macerator-macros", - "version": "0.1.1", + "version": "0.1.2", "authors": "Genna Wingert", "repository": 
"https://github.com/wingertge/macerator", "license": "Apache-2.0 OR MIT", @@ -2485,7 +2467,7 @@ }, { "name": "matchit", - "version": "0.7.3", + "version": "0.8.4", "authors": "Ibraheem Ahmed ", "repository": "https://github.com/ibraheemdev/matchit", "license": "MIT AND BSD-3-Clause", @@ -2494,7 +2476,7 @@ }, { "name": "matrixmultiply", - "version": "0.3.9", + "version": "0.3.10", "authors": "bluss|R. Janis Goldschmidt", "repository": "https://github.com/bluss/matrixmultiply/", "license": "Apache-2.0 OR MIT", @@ -2512,7 +2494,7 @@ }, { "name": "memchr", - "version": "2.7.4", + "version": "2.7.5", "authors": "Andrew Gallant |bluss", "repository": "https://github.com/BurntSushi/memchr", "license": "MIT OR Unlicense", @@ -2566,7 +2548,7 @@ }, { "name": "miniz_oxide", - "version": "0.8.8", + "version": "0.8.9", "authors": "Frommi |oyvindln |Rich Geldreich richgel99@gmail.com", "repository": "https://github.com/Frommi/miniz_oxide/tree/master/miniz_oxide", "license": "Apache-2.0 OR MIT OR Zlib", @@ -2575,13 +2557,22 @@ }, { "name": "mio", - "version": "1.0.3", + "version": "1.0.4", "authors": "Carl Lerche |Thomas de Zeeuw |Tokio Contributors ", "repository": "https://github.com/tokio-rs/mio", "license": "MIT", "license_file": null, "description": "Lightweight non-blocking I/O." }, + { + "name": "moddef", + "version": "0.2.6", + "authors": null, + "repository": "https://github.com/sigurd4/moddef", + "license": "MIT", + "license_file": null, + "description": "Macro for convenient module declaration. Each module can be put in a group, and visibility can be applied to the whole group with ease." + }, { "name": "multer", "version": "3.1.0", @@ -2593,7 +2584,7 @@ }, { "name": "multimap", - "version": "0.10.0", + "version": "0.10.1", "authors": "Håvar Nøvik ", "repository": "https://github.com/havarnov/multimap", "license": "Apache-2.0 OR MIT", @@ -2627,15 +2618,6 @@ "license_file": null, "description": "An n-dimensional array for general elements and for numerics. Lightweight array views and slicing; views support chunking and splitting." }, - { - "name": "ndarray-rand", - "version": "0.15.0", - "authors": "bluss", - "repository": "https://github.com/rust-ndarray/ndarray", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Constructors for randomized arrays. `rand` integration for `ndarray`." 
- }, { "name": "ndk-sys", "version": "0.5.0+25.2.9519653", @@ -2664,13 +2646,13 @@ "description": "A byte-oriented, zero-copy, parser combinators library" }, { - "name": "nonempty", - "version": "0.7.0", - "authors": "Alexis Sellier ", - "repository": "https://github.com/cloudhead/nonempty", + "name": "nom", + "version": "8.0.0", + "authors": "contact@geoffroycouprie.com", + "repository": "https://github.com/rust-bakery/nom", "license": "MIT", "license_file": null, - "description": "Correct by construction non-empty vector" + "description": "A byte-oriented, zero-copy, parser combinators library" }, { "name": "ntapi", @@ -2773,7 +2755,7 @@ }, { "name": "num_cpus", - "version": "1.16.0", + "version": "1.17.0", "authors": "Sean McArthur ", "repository": "https://github.com/seanmonstar/num_cpus", "license": "Apache-2.0 OR MIT", @@ -2845,7 +2827,7 @@ }, { "name": "openssl", - "version": "0.10.72", + "version": "0.10.73", "authors": "Steven Fackler ", "repository": "https://github.com/sfackler/rust-openssl", "license": "Apache-2.0", @@ -2872,7 +2854,7 @@ }, { "name": "openssl-sys", - "version": "0.9.107", + "version": "0.9.109", "authors": "Alex Crichton |Steven Fackler ", "repository": "https://github.com/sfackler/rust-openssl", "license": "MIT", @@ -2926,7 +2908,7 @@ }, { "name": "parking_lot", - "version": "0.12.3", + "version": "0.12.4", "authors": "Amanieu d'Antras ", "repository": "https://github.com/Amanieu/parking_lot", "license": "Apache-2.0 OR MIT", @@ -2935,7 +2917,7 @@ }, { "name": "parking_lot_core", - "version": "0.9.10", + "version": "0.9.11", "authors": "Amanieu d'Antras ", "repository": "https://github.com/Amanieu/parking_lot", "license": "Apache-2.0 OR MIT", @@ -3088,7 +3070,7 @@ }, { "name": "portable-atomic", - "version": "1.11.0", + "version": "1.11.1", "authors": null, "repository": "https://github.com/taiki-e/portable-atomic", "license": "Apache-2.0 OR MIT", @@ -3104,6 +3086,15 @@ "license_file": null, "description": "Synchronization primitives built with portable-atomic." 
}, + { + "name": "potential_utf", + "version": "0.1.2", + "authors": "The ICU4X Project Developers", + "repository": "https://github.com/unicode-org/icu4x", + "license": "Unicode-3.0", + "license_file": null, + "description": "Unvalidated string and character types" + }, { "name": "powerfmt", "version": "0.2.0", @@ -3142,7 +3133,7 @@ }, { "name": "prettyplease", - "version": "0.2.32", + "version": "0.2.34", "authors": "David Tolnay ", "repository": "https://github.com/dtolnay/prettyplease", "license": "Apache-2.0 OR MIT", @@ -3241,13 +3232,22 @@ }, { "name": "pulldown-cmark", - "version": "0.9.6", + "version": "0.13.0", "authors": "Raph Levien |Marcus Klaas de Vries ", "repository": "https://github.com/raphlinus/pulldown-cmark", "license": "MIT", "license_file": null, "description": "A pull parser for CommonMark" }, + { + "name": "pulldown-cmark-escape", + "version": "0.11.0", + "authors": "Raph Levien |Marcus Klaas de Vries ", + "repository": "https://github.com/raphlinus/pulldown-cmark", + "license": "MIT", + "license_file": null, + "description": "An escape library for HTML created in the pulldown-cmark project" + }, { "name": "pulp", "version": "0.18.22", @@ -3259,7 +3259,7 @@ }, { "name": "pulp", - "version": "0.21.4", + "version": "0.21.5", "authors": "sarah <>", "repository": "https://github.com/sarah-ek/pulp/", "license": "MIT", @@ -3268,7 +3268,7 @@ }, { "name": "quinn", - "version": "0.11.7", + "version": "0.11.8", "authors": null, "repository": "https://github.com/quinn-rs/quinn", "license": "Apache-2.0 OR MIT", @@ -3277,7 +3277,7 @@ }, { "name": "quinn-proto", - "version": "0.11.11", + "version": "0.11.12", "authors": null, "repository": "https://github.com/quinn-rs/quinn", "license": "Apache-2.0 OR MIT", @@ -3286,7 +3286,7 @@ }, { "name": "quinn-udp", - "version": "0.5.11", + "version": "0.5.12", "authors": null, "repository": "https://github.com/quinn-rs/quinn", "license": "Apache-2.0 OR MIT", @@ -3365,15 +3365,6 @@ "license_file": null, "description": "Core random number generator traits and tools for implementation." 
}, - { - "name": "rand_distr", - "version": "0.4.3", - "authors": "The Rand Project Developers", - "repository": "https://github.com/rust-random/rand", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Sampling from random number distributions" - }, { "name": "rand_distr", "version": "0.5.1", @@ -3457,7 +3448,7 @@ }, { "name": "redox_syscall", - "version": "0.5.11", + "version": "0.5.13", "authors": "Jeremy Soller ", "repository": "https://gitlab.redox-os.org/redox-os/syscall", "license": "MIT", @@ -3547,7 +3538,7 @@ }, { "name": "reqwest", - "version": "0.12.15", + "version": "0.12.20", "authors": "Sean McArthur ", "repository": "https://github.com/seanmonstar/reqwest", "license": "Apache-2.0 OR MIT", @@ -3601,7 +3592,7 @@ }, { "name": "rusqlite", - "version": "0.30.0", + "version": "0.36.0", "authors": "The rusqlite developers", "repository": "https://github.com/rusqlite/rusqlite", "license": "MIT", @@ -3610,7 +3601,7 @@ }, { "name": "rustc-demangle", - "version": "0.1.24", + "version": "0.1.25", "authors": "Alex Crichton ", "repository": "https://github.com/rust-lang/rustc-demangle", "license": "Apache-2.0 OR MIT", @@ -3646,7 +3637,7 @@ }, { "name": "rustix", - "version": "1.0.5", + "version": "1.0.7", "authors": "Dan Gohman |Jakub Konka ", "repository": "https://github.com/bytecodealliance/rustix", "license": "Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT", @@ -3655,7 +3646,7 @@ }, { "name": "rustls", - "version": "0.23.26", + "version": "0.23.28", "authors": null, "repository": "https://github.com/rustls/rustls", "license": "Apache-2.0 OR ISC OR MIT", @@ -3682,7 +3673,7 @@ }, { "name": "rustls-pki-types", - "version": "1.11.0", + "version": "1.12.0", "authors": null, "repository": "https://github.com/rustls/pki-types", "license": "Apache-2.0 OR MIT", @@ -3691,7 +3682,7 @@ }, { "name": "rustls-webpki", - "version": "0.103.1", + "version": "0.103.3", "authors": null, "repository": "https://github.com/rustls/webpki", "license": "ISC", @@ -3700,7 +3691,7 @@ }, { "name": "rustversion", - "version": "1.0.20", + "version": "1.0.21", "authors": "David Tolnay ", "repository": "https://github.com/dtolnay/rustversion", "license": "Apache-2.0 OR MIT", @@ -3797,15 +3788,6 @@ "license_file": null, "description": "Apple `Security.framework` low-level FFI bindings" }, - { - "name": "self_cell", - "version": "0.10.3", - "authors": "Lukas Bergdoll ", - "repository": "https://github.com/Voultapher/self_cell", - "license": "Apache-2.0", - "license_file": null, - "description": "Safe-to-use proc-macro-free self-referential structs in stable Rust." - }, { "name": "self_cell", "version": "1.2.0", @@ -3905,9 +3887,18 @@ "license_file": null, "description": "Derive Serialize and Deserialize that delegates to the underlying repr of a C-like enum." 
}, + { + "name": "serde_spanned", + "version": "0.6.9", + "authors": null, + "repository": "https://github.com/toml-rs/toml", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "Serde-compatible spanned Value" + }, { "name": "serde_tuple", - "version": "0.5.0", + "version": "1.1.0", "authors": "Jacob Brown ", "repository": "https://github.com/kardeiz/serde_tuple", "license": "MIT", @@ -3916,12 +3907,12 @@ }, { "name": "serde_tuple_macros", - "version": "0.5.0", + "version": "1.0.1", "authors": "Jacob Brown ", "repository": "https://github.com/kardeiz/serde_tuple", "license": "MIT", "license_file": null, - "description": "De/serialize structs with named fields as array of values" + "description": "Internal proc-macro crate for serde_tuple" }, { "name": "serde_urlencoded", @@ -3943,7 +3934,7 @@ }, { "name": "sha2", - "version": "0.10.8", + "version": "0.10.9", "authors": "RustCrypto Developers", "repository": "https://github.com/RustCrypto/hashes", "license": "Apache-2.0 OR MIT", @@ -3977,6 +3968,15 @@ "license_file": null, "description": "Backend crate for signal-hook" }, + { + "name": "simd-adler32", + "version": "0.3.7", + "authors": "Marvin Countryman ", + "repository": "https://github.com/mcountryman/simd-adler32", + "license": "MIT", + "license_file": null, + "description": "A SIMD-accelerated Adler-32 hash algorithm implementation." + }, { "name": "siphasher", "version": "1.0.1", @@ -3988,7 +3988,7 @@ }, { "name": "slab", - "version": "0.4.9", + "version": "0.4.10", "authors": "Carl Lerche ", "repository": "https://github.com/tokio-rs/slab", "license": "MIT", @@ -4006,7 +4006,7 @@ }, { "name": "smallvec", - "version": "1.15.0", + "version": "1.15.1", "authors": "The Servo Project Developers", "repository": "https://github.com/servo/rust-smallvec", "license": "Apache-2.0 OR MIT", @@ -4042,7 +4042,7 @@ }, { "name": "socket2", - "version": "0.5.9", + "version": "0.5.10", "authors": "Alex Crichton |Thomas de Zeeuw ", "repository": "https://github.com/rust-lang/socket2", "license": "Apache-2.0 OR MIT", @@ -4177,7 +4177,7 @@ }, { "name": "syn", - "version": "2.0.101", + "version": "2.0.103", "authors": "David Tolnay ", "repository": "https://github.com/dtolnay/syn", "license": "Apache-2.0 OR MIT", @@ -4195,7 +4195,7 @@ }, { "name": "synstructure", - "version": "0.13.1", + "version": "0.13.2", "authors": "Nika Layzell ", "repository": "https://github.com/mystor/synstructure", "license": "MIT", @@ -4240,7 +4240,7 @@ }, { "name": "tempfile", - "version": "3.19.1", + "version": "3.20.0", "authors": "Steven Allen |The Rust Project Developers|Ashley Mannix |Jason White ", "repository": "https://github.com/Stebalien/tempfile", "license": "Apache-2.0 OR MIT", @@ -4321,7 +4321,7 @@ }, { "name": "thread_local", - "version": "1.1.8", + "version": "1.1.9", "authors": "Amanieu d'Antras ", "repository": "https://github.com/Amanieu/thread_local-rs", "license": "Apache-2.0 OR MIT", @@ -4357,7 +4357,7 @@ }, { "name": "tinystr", - "version": "0.7.6", + "version": "0.8.1", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -4384,7 +4384,7 @@ }, { "name": "tokio", - "version": "1.44.2", + "version": "1.45.1", "authors": "Tokio Contributors ", "repository": "https://github.com/tokio-rs/tokio", "license": "MIT", @@ -4418,15 +4418,6 @@ "license_file": null, "description": "Asynchronous TLS/SSL streams for Tokio using Rustls." 
}, - { - "name": "tokio-socks", - "version": "0.5.2", - "authors": "Yilin Chen ", - "repository": "https://github.com/sticnarf/tokio-socks", - "license": "MIT", - "license_file": null, - "description": "Asynchronous SOCKS proxy support for Rust." - }, { "name": "tokio-util", "version": "0.7.15", @@ -4438,8 +4429,8 @@ }, { "name": "toml_datetime", - "version": "0.6.9", - "authors": "Alex Crichton ", + "version": "0.6.11", + "authors": null, "repository": "https://github.com/toml-rs/toml", "license": "Apache-2.0 OR MIT", "license_file": null, @@ -4447,13 +4438,22 @@ }, { "name": "toml_edit", - "version": "0.22.25", - "authors": "Andronik Ordian |Ed Page ", + "version": "0.22.27", + "authors": null, "repository": "https://github.com/toml-rs/toml", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "Yet another format-preserving TOML parser." }, + { + "name": "toml_write", + "version": "0.1.2", + "authors": null, + "repository": "https://github.com/toml-rs/toml", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "A low-level interface for writing out TOML" + }, { "name": "tower", "version": "0.5.2", @@ -4465,7 +4465,7 @@ }, { "name": "tower-http", - "version": "0.5.2", + "version": "0.6.6", "authors": "Tower Maintainers ", "repository": "https://github.com/tower-rs/tower-http", "license": "MIT", @@ -4510,7 +4510,7 @@ }, { "name": "tracing-attributes", - "version": "0.1.28", + "version": "0.1.29", "authors": "Tokio Contributors |Eliza Weisman |David Barsky ", "repository": "https://github.com/tokio-rs/tracing", "license": "MIT", @@ -4519,7 +4519,7 @@ }, { "name": "tracing-core", - "version": "0.1.33", + "version": "0.1.34", "authors": "Tokio Contributors ", "repository": "https://github.com/tokio-rs/tracing", "license": "MIT", @@ -4555,7 +4555,7 @@ }, { "name": "type-map", - "version": "0.5.0", + "version": "0.5.1", "authors": "Jacob Brown ", "repository": "https://github.com/kardeiz/type-map", "license": "Apache-2.0 OR MIT", @@ -4609,7 +4609,7 @@ }, { "name": "unic-langid", - "version": "0.9.5", + "version": "0.9.6", "authors": "Zibi Braniecki ", "repository": "https://github.com/zbraniecki/unic-locale", "license": "Apache-2.0 OR MIT", @@ -4618,7 +4618,7 @@ }, { "name": "unic-langid-impl", - "version": "0.9.5", + "version": "0.9.6", "authors": "Zibi Braniecki ", "repository": "https://github.com/zbraniecki/unic-locale", "license": "Apache-2.0 OR MIT", @@ -4627,7 +4627,7 @@ }, { "name": "unic-langid-macros", - "version": "0.9.5", + "version": "0.9.6", "authors": "Zibi Braniecki ", "repository": "https://github.com/zbraniecki/unic-locale", "license": "Apache-2.0 OR MIT", @@ -4636,7 +4636,7 @@ }, { "name": "unic-langid-macros-impl", - "version": "0.9.5", + "version": "0.9.6", "authors": "Zibi Braniecki ", "repository": "https://github.com/zbraniecki/unic-locale", "license": "Apache-2.0 OR MIT", @@ -4699,16 +4699,7 @@ }, { "name": "unicode-width", - "version": "0.1.14", - "authors": "kwantam |Manish Goregaokar ", - "repository": "https://github.com/unicode-rs/unicode-width", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Determine displayed width of `char` and `str` types according to Unicode Standard Annex #11 rules." 
- }, - { - "name": "unicode-width", - "version": "0.2.0", + "version": "0.2.1", "authors": "kwantam |Manish Goregaokar ", "repository": "https://github.com/unicode-rs/unicode-width", "license": "Apache-2.0 OR MIT", @@ -4760,15 +4751,6 @@ "license_file": null, "description": "Incremental, zero-copy UTF-8 decoding with error handling" }, - { - "name": "utf16_iter", - "version": "1.0.5", - "authors": "Henri Sivonen ", - "repository": "https://github.com/hsivonen/utf16_iter", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Iterator by char over potentially-invalid UTF-16 in &[u16]" - }, { "name": "utf8_iter", "version": "1.0.4", @@ -4780,7 +4762,7 @@ }, { "name": "uuid", - "version": "1.16.0", + "version": "1.17.0", "authors": "Ashley Mannix|Dylan DPC|Hunar Roop Kahlon", "repository": "https://github.com/uuid-rs/uuid", "license": "Apache-2.0 OR MIT", @@ -4843,7 +4825,7 @@ }, { "name": "wasi", - "version": "0.11.0+wasi-snapshot-preview1", + "version": "0.11.1+wasi-snapshot-preview1", "authors": "The Cranelift Project Developers", "repository": "https://github.com/bytecodealliance/wasi", "license": "Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT", @@ -4951,7 +4933,7 @@ }, { "name": "web_atoms", - "version": "0.1.0", + "version": "0.1.3", "authors": "The html5ever Project Developers", "repository": "https://github.com/servo/html5ever", "license": "Apache-2.0 OR MIT", @@ -4960,16 +4942,16 @@ }, { "name": "webpki-roots", - "version": "0.26.8", + "version": "1.0.0", "authors": null, "repository": "https://github.com/rustls/webpki-roots", - "license": "MPL-2.0", + "license": "CDLA-Permissive-2.0", "license_file": null, "description": "Mozilla's CA root certificates for use with webpki" }, { "name": "wgpu", - "version": "25.0.0", + "version": "25.0.2", "authors": "gfx-rs developers", "repository": "https://github.com/gfx-rs/wgpu", "license": "Apache-2.0 OR MIT", @@ -4978,7 +4960,7 @@ }, { "name": "wgpu-core", - "version": "25.0.1", + "version": "25.0.2", "authors": "gfx-rs developers", "repository": "https://github.com/gfx-rs/wgpu", "license": "Apache-2.0 OR MIT", @@ -5014,7 +4996,7 @@ }, { "name": "wgpu-hal", - "version": "25.0.1", + "version": "25.0.2", "authors": "gfx-rs developers", "repository": "https://github.com/gfx-rs/wgpu", "license": "Apache-2.0 OR MIT", @@ -5077,7 +5059,7 @@ }, { "name": "windows", - "version": "0.57.0", + "version": "0.58.0", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5086,13 +5068,22 @@ }, { "name": "windows", - "version": "0.58.0", + "version": "0.61.3", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "Rust for Windows" }, + { + "name": "windows-collections", + "version": "0.2.0", + "authors": null, + "repository": "https://github.com/microsoft/windows-rs", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "Windows collection types" + }, { "name": "windows-core", "version": "0.56.0", @@ -5102,15 +5093,6 @@ "license_file": null, "description": "Rust for Windows" }, - { - "name": "windows-core", - "version": "0.57.0", - "authors": "Microsoft", - "repository": "https://github.com/microsoft/windows-rs", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Rust for Windows" - }, { "name": "windows-core", "version": "0.58.0", @@ -5122,7 +5104,7 @@ }, { "name": "windows-core", - "version": "0.61.0", + "version": "0.61.2", 
"authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5130,17 +5112,17 @@ "description": "Core type support for COM and Windows" }, { - "name": "windows-implement", - "version": "0.56.0", - "authors": "Microsoft", + "name": "windows-future", + "version": "0.2.1", + "authors": null, "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "The implement macro for the windows crate" + "description": "Windows async types" }, { "name": "windows-implement", - "version": "0.57.0", + "version": "0.56.0", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5174,15 +5156,6 @@ "license_file": null, "description": "The interface macro for the windows crate" }, - { - "name": "windows-interface", - "version": "0.57.0", - "authors": "Microsoft", - "repository": "https://github.com/microsoft/windows-rs", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "The interface macro for the windows crate" - }, { "name": "windows-interface", "version": "0.58.0", @@ -5203,7 +5176,7 @@ }, { "name": "windows-link", - "version": "0.1.1", + "version": "0.1.3", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5211,13 +5184,13 @@ "description": "Linking for Windows" }, { - "name": "windows-registry", - "version": "0.4.0", - "authors": "Microsoft", + "name": "windows-numerics", + "version": "0.2.0", + "authors": null, "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", "license_file": null, - "description": "Windows registry" + "description": "Windows numeric types" }, { "name": "windows-result", @@ -5239,7 +5212,7 @@ }, { "name": "windows-result", - "version": "0.3.2", + "version": "0.3.4", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5257,16 +5230,7 @@ }, { "name": "windows-strings", - "version": "0.3.1", - "authors": "Microsoft", - "repository": "https://github.com/microsoft/windows-rs", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "Windows string types" - }, - { - "name": "windows-strings", - "version": "0.4.0", + "version": "0.4.2", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", @@ -5300,6 +5264,15 @@ "license_file": null, "description": "Rust for Windows" }, + { + "name": "windows-sys", + "version": "0.60.2", + "authors": "Microsoft", + "repository": "https://github.com/microsoft/windows-rs", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "Rust for Windows" + }, { "name": "windows-targets", "version": "0.48.5", @@ -5320,13 +5293,22 @@ }, { "name": "windows-targets", - "version": "0.53.0", + "version": "0.53.2", "authors": "Microsoft", "repository": "https://github.com/microsoft/windows-rs", "license": "Apache-2.0 OR MIT", "license_file": null, "description": "Import libs for Windows" }, + { + "name": "windows-threading", + "version": "0.1.0", + "authors": "Microsoft", + "repository": "https://github.com/microsoft/windows-rs", + "license": "Apache-2.0 OR MIT", + "license_file": null, + "description": "Windows threading" + }, { "name": "windows_aarch64_gnullvm", "version": "0.48.5", @@ -5536,7 +5518,7 @@ }, { "name": "winnow", - "version": "0.7.7", + "version": "0.7.11", "authors": null, "repository": 
"https://github.com/winnow-rs/winnow", "license": "MIT", @@ -5570,18 +5552,9 @@ "license_file": null, "description": "Derive macro for nvml-wrapper, not for general use" }, - { - "name": "write16", - "version": "1.0.0", - "authors": null, - "repository": "https://github.com/hsivonen/write16", - "license": "Apache-2.0 OR MIT", - "license_file": null, - "description": "A UTF-16 analog of the Write trait" - }, { "name": "writeable", - "version": "0.5.5", + "version": "0.6.1", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -5606,6 +5579,15 @@ "license_file": null, "description": "Abstraction allowing borrowed data to be carried along with the backing data it borrows from" }, + { + "name": "yoke", + "version": "0.8.0", + "authors": "Manish Goregaokar ", + "repository": "https://github.com/unicode-org/icu4x", + "license": "Unicode-3.0", + "license_file": null, + "description": "Abstraction allowing borrowed data to be carried along with the backing data it borrows from" + }, { "name": "yoke-derive", "version": "0.7.5", @@ -5616,13 +5598,13 @@ "description": "Custom derive for the yoke crate" }, { - "name": "zerocopy", - "version": "0.7.35", - "authors": "Joshua Liebow-Feeser ", - "repository": "https://github.com/google/zerocopy", - "license": "Apache-2.0 OR BSD-2-Clause OR MIT", + "name": "yoke-derive", + "version": "0.8.0", + "authors": "Manish Goregaokar ", + "repository": "https://github.com/unicode-org/icu4x", + "license": "Unicode-3.0", "license_file": null, - "description": "Utilities for zero-copy parsing and serialization" + "description": "Custom derive for the yoke crate" }, { "name": "zerocopy", @@ -5633,15 +5615,6 @@ "license_file": null, "description": "Zerocopy makes zero-cost memory manipulation effortless. We write \"unsafe\" so you don't have to." }, - { - "name": "zerocopy-derive", - "version": "0.7.35", - "authors": "Joshua Liebow-Feeser ", - "repository": "https://github.com/google/zerocopy", - "license": "Apache-2.0 OR BSD-2-Clause OR MIT", - "license_file": null, - "description": "Custom derive for traits from the zerocopy crate" - }, { "name": "zerocopy-derive", "version": "0.8.25", @@ -5678,9 +5651,18 @@ "license_file": null, "description": "Securely clear secrets from memory with a simple trait built on stable Rust primitives which guarantee memory is zeroed using an operation will not be 'optimized away' by the compiler. Uses a portable pure Rust implementation that works everywhere, even WASM!" 
}, + { + "name": "zerotrie", + "version": "0.2.2", + "authors": "The ICU4X Project Developers", + "repository": "https://github.com/unicode-org/icu4x", + "license": "Unicode-3.0", + "license_file": null, + "description": "A data structure that efficiently maps strings to integers" + }, { "name": "zerovec", - "version": "0.10.4", + "version": "0.11.2", "authors": "The ICU4X Project Developers", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", @@ -5689,22 +5671,13 @@ }, { "name": "zerovec-derive", - "version": "0.10.3", + "version": "0.11.1", "authors": "Manish Goregaokar ", "repository": "https://github.com/unicode-org/icu4x", "license": "Unicode-3.0", "license_file": null, "description": "Custom derive for the zerovec crate" }, - { - "name": "zip", - "version": "0.6.6", - "authors": "Mathijs van de Nes |Marli Frost |Ryan Levick ", - "repository": "https://github.com/zip-rs/zip.git", - "license": "MIT", - "license_file": null, - "description": "Library to support the reading and writing of zip files." - }, { "name": "zip", "version": "1.1.4", @@ -5714,6 +5687,33 @@ "license_file": null, "description": "Library to support the reading and writing of zip files." }, + { + "name": "zip", + "version": "4.1.0", + "authors": "Mathijs van de Nes |Marli Frost |Ryan Levick |Chris Hennick ", + "repository": "https://github.com/zip-rs/zip2.git", + "license": "MIT", + "license_file": null, + "description": "Library to support the reading and writing of zip files." + }, + { + "name": "zlib-rs", + "version": "0.5.1", + "authors": null, + "repository": "https://github.com/trifectatechfoundation/zlib-rs", + "license": "Zlib", + "license_file": null, + "description": "A memory-safe zlib implementation written in rust" + }, + { + "name": "zopfli", + "version": "0.8.2", + "authors": null, + "repository": "https://github.com/zopfli-rs/zopfli", + "license": "Apache-2.0", + "license_file": null, + "description": "A Rust implementation of the Zopfli compression algorithm." + }, { "name": "zstd", "version": "0.13.3", diff --git a/docs/development.md b/docs/development.md index a057b5c10..defe9ef1e 100644 --- a/docs/development.md +++ b/docs/development.md @@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with ./ninja format ``` -## Fixing eslint/copyright header issues +## Fixing ruff/eslint/copyright header issues ``` ./ninja fix @@ -190,13 +190,10 @@ in the collection2.log file will also be printed on stdout. If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit. -# Binary Bundles +# Installer/launcher -Anki's official binary packages are created with `./ninja bundle`. The bundling -process was created specifically for the official builds, and is provided as-is; -we are unfortunately not able to provide assistance with any issues you may run -into when using it. You'll need to run -`git submodule update --checkout qt/bundle/PyOxidizer` first. +- The anki-release package is created/published with the scripts in qt/release. +- The installer/launcher is created with the build scripts in qt/launcher/{platform}. ## Mixing development and study diff --git a/docs/linux.md b/docs/linux.md index 27e3ceeda..55794e074 100644 --- a/docs/linux.md +++ b/docs/linux.md @@ -51,13 +51,8 @@ Anki requires a recent glibc. If you are using a distro that uses musl, Anki will not work. -If your glibc version is 2.35+ on AMD64 or 2.39+ on ARM64, you can skip the rest of this section. 
- -If your system has an older glibc, you won't be able to use the PyQt wheels that are -available in pip/PyPy, and will need to use your system-installed PyQt instead. -Your distro will also need to have Python 3.9 or later. - -After installing the system libraries (eg: +If you wish, you can use your system's Qt libraries, provided they are Qt 6.2 +or later. After installing the system libraries (eg: 'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv pyqt6-dev-tools'), find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll also need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where @@ -68,12 +63,6 @@ export PYTHONPATH=/usr/lib/python3/dist-packages export PYTHON_BINARY=/usr/bin/python3 ``` -There are a few things to be aware of: - -- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5. -- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work - on Qt6 environments. - ## Packaging considerations Python, node and protoc are downloaded as part of the build. You can optionally define diff --git a/docs/protobuf.md b/docs/protobuf.md index 29094fc65..75796b473 100644 --- a/docs/protobuf.md +++ b/docs/protobuf.md @@ -98,12 +98,6 @@ should preferably be assigned a number between 1 and 15. If a message contains Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated). -- Every message used in aqt or pylib must be added to the respective `.pylintrc` - to avoid failing type checks. The unqualified protobuf message's name must be - used, not an alias from `collection.py` for example. This should be taken into - account when choosing a message name in order to prevent skipping typechecking - a Python class of the same name. - ### Typescript Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers diff --git a/docs/windows.md b/docs/windows.md index aae9f6869..12f4e7c39 100644 --- a/docs/windows.md +++ b/docs/windows.md @@ -9,7 +9,12 @@ You must be running 64 bit Windows 10, version 1703 or newer. **Rustup**: As mentioned in development.md, rustup must be installed. If you're on -ARM Windows, you must set the default target to x86_64-pc-windows-msvc. +ARM Windows and have installed the ARM64 version of rustup, run the following +from this project folder: + +``` +rustup target add x86_64-pc-windows-msvc +``` **Visual Studio**: diff --git a/ftl/core-repo b/ftl/core-repo index 78412ce16..a9216499b 160000 --- a/ftl/core-repo +++ b/ftl/core-repo @@ -1 +1 @@ -Subproject commit 78412ce163d4dc50dd82f5b27cde3119086a2eb7 +Subproject commit a9216499ba1fb1538cfd740c698adaaa3410fd4b diff --git a/ftl/core/card-templates.ftl b/ftl/core/card-templates.ftl index 7ecda1968..edb2433f9 100644 --- a/ftl/core/card-templates.ftl +++ b/ftl/core/card-templates.ftl @@ -60,7 +60,6 @@ card-templates-this-will-create-card-proceed = } card-templates-type-boxes-warning = Only one typing box per card template is supported. card-templates-restore-to-default = Restore to Default -card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default - values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
+card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed? card-templates-restored-to-default = Note type has been restored to its original state. diff --git a/ftl/core/deck-config.ftl b/ftl/core/deck-config.ftl index 286e6bae8..a091dccef 100644 --- a/ftl/core/deck-config.ftl +++ b/ftl/core/deck-config.ftl @@ -425,6 +425,8 @@ deck-config-desired-retention-tooltip = less frequently, and you will forget more of them. Be conservative when adjusting this - higher values will greatly increase your workload, and lower values can be demoralizing when you forget a lot of material. +deck-config-desired-retention-tooltip2 = + The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator. deck-config-historical-retention-tooltip = When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will assume that when you did those old reviews, you remembered 90% of the material. If your old retention diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl index 70bc5f4d1..3b9f7c401 100644 --- a/ftl/core/importing.ftl +++ b/ftl/core/importing.ftl @@ -65,7 +65,6 @@ importing-with-deck-configs-help = If enabled, any deck options that the deck sharer included will also be imported. Otherwise, all decks will be assigned the default preset. importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip) -importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) # the '|' character importing-pipe = Pipe # Warning displayed when the csv import preview table is clipped (some columns were hidden) @@ -78,7 +77,6 @@ importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } field importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual. importing-semicolon = Semicolon importing-skipped = Skipped -importing-supermemo-xml-export-xml = Supermemo XML export (*.xml) importing-tab = Tab importing-tag-modified-notes = Tag modified notes: importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*) @@ -252,3 +250,5 @@ importing-importing-collection = Importing collection... importing-unable-to-import-filename = Unable to import { $filename }: file type not supported importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val } importing-added = Added +importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz) +importing-supermemo-xml-export-xml = Supermemo XML export (*.xml) diff --git a/ftl/core/statistics.ftl b/ftl/core/statistics.ftl index c3a2bb613..8da1aace8 100644 --- a/ftl/core/statistics.ftl +++ b/ftl/core/statistics.ftl @@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning statistics-counts-title = Card Counts statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards -## True Retention represents your actual retention rate from past reviews, in -## comparison to the "desired retention" parameter of FSRS, which forecasts -## future retention. True Retention is the percentage of all reviewed cards +## Retention rate represents your actual retention rate from past reviews, in +## comparison to the "desired retention" setting of FSRS, which forecasts +## future retention. 
Retention rate is the percentage of all reviewed cards ## that were marked as "Hard," "Good," or "Easy" within a specific time period. ## ## Most of these strings are used as column / row headings in a table. @@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca ## N.B. Stats cards may be very small on mobile devices and when the Stats ## window is certain sizes. -statistics-true-retention-title = True Retention +statistics-true-retention-title = Retention rate statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day. -statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data. +statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data. statistics-true-retention-range = Range statistics-true-retention-pass = Pass statistics-true-retention-fail = Fail diff --git a/ftl/extract-strings.py b/ftl/extract-strings.py deleted file mode 100644 index 7c3a63b3f..000000000 --- a/ftl/extract-strings.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -""" -Tool to extract core strings and keys from .ftl files. -""" - -import glob -import json -import os - -from fluent.syntax import parse -from fluent.syntax.ast import Junk, Message -from fluent.syntax.serializer import serialize_element - -root = ".." -ftl_files = glob.glob(os.path.join(root, "ftl", "core", "*.ftl"), recursive=True) -keys_by_value: dict[str, list[str]] = {} - -for path in ftl_files: - obj = parse(open(path, encoding="utf8").read(), with_spans=False) - for ent in obj.body: - if isinstance(ent, Junk): - raise Exception(f"file had junk! {path} {ent}") - if isinstance(ent, Message): - key = ent.id.name - val = "".join(serialize_element(elem) for elem in ent.value.elements) - if val in keys_by_value: - print("duplicate found:", keys_by_value[val], key) - keys_by_value.setdefault(val, []).append(key) - -json.dump( - keys_by_value, open(os.path.join(root, "keys_by_value.json"), "w", encoding="utf8") -) -print("keys:", len(keys_by_value)) diff --git a/ftl/format.py b/ftl/format.py deleted file mode 100644 index a18aa5cfa..000000000 --- a/ftl/format.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python3 -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -""" -Parse and re-serialize ftl files to get them in a consistent form. -""" - -import difflib -import glob -import os -from typing import List - -from compare_locales import parser -from compare_locales.checks.fluent import ReferenceMessageVisitor -from compare_locales.paths import File -from fluent.syntax import parse, serialize -from fluent.syntax.ast import Junk - - -def check_missing_terms(path: str) -> bool: - "True if file is ok." 
- file = File(path, os.path.basename(path)) - content = open(path, "rb").read() - p = parser.getParser(file.file) - p.readContents(content) - refList = p.parse() - - p.readContents(content) - for e in p.parse(): - ref_data = ReferenceMessageVisitor() - ref_data.visit(e.entry) - - for attr_or_val, refs in ref_data.entry_refs.items(): - for ref, ref_type in refs.items(): - if ref not in refList: - print(f"In {path}:{e}, missing '{ref}'") - return False - - return True - - -def check_file(path: str, fix: bool) -> bool: - "True if file is ok." - orig_text = open(path, encoding="utf8").read() - obj = parse(orig_text, with_spans=False) - # make sure there's no junk - for ent in obj.body: - if isinstance(ent, Junk): - raise Exception(f"file had junk! {path} {ent}") - # serialize - new_text = serialize(obj) - # make sure serializing did not introduce new junk - obj = parse(new_text, with_spans=False) - for ent in obj.body: - if isinstance(ent, Junk): - raise Exception(f"file introduced junk! {path} {ent}") - - if new_text == orig_text: - return check_missing_terms(path) - - if fix: - print(f"Fixing {path}") - open(path, "w", newline="\n", encoding="utf8").write(new_text) - return True - else: - print(f"Bad formatting in {path}") - print( - "\n".join( - difflib.unified_diff( - orig_text.splitlines(), - new_text.splitlines(), - fromfile="bad", - tofile="good", - lineterm="", - ) - ) - ) - return False - - -def check_files(files: List[str], fix: bool) -> bool: - "True if files ok." - - found_bad = False - for path in files: - ok = check_file(path, fix) - if not ok: - found_bad = True - return not found_bad - - -if __name__ == "__main__": - template_root = os.environ["BUILD_WORKSPACE_DIRECTORY"] - template_files = glob.glob( - os.path.join(template_root, "ftl", "*", "*.ftl"), recursive=True - ) - - check_files(template_files, fix=True) diff --git a/ftl/format_check.py b/ftl/format_check.py deleted file mode 100644 index 7faeacd2c..000000000 --- a/ftl/format_check.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -import glob -import os -import sys - -import format - -template_root = os.path.dirname(sys.argv[1]) -template_files = glob.glob(os.path.join(template_root, "*", "*.ftl"), recursive=True) - -if not format.check_files(template_files, fix=False): - sys.exit(1) diff --git a/ftl/qt-repo b/ftl/qt-repo index fbe9d1c73..a1134ab59 160000 --- a/ftl/qt-repo +++ b/ftl/qt-repo @@ -1 +1 @@ -Subproject commit fbe9d1c731f7ad09953e63fdb0c455a6d3a3b6be +Subproject commit a1134ab59d3d23468af2968741aa1f21d16ff308 diff --git a/ftl/qt/qt-accel.ftl b/ftl/qt/qt-accel.ftl index 327cd6c46..3ab54eb24 100644 --- a/ftl/qt/qt-accel.ftl +++ b/ftl/qt/qt-accel.ftl @@ -1,4 +1,5 @@ qt-accel-about = &About +qt-accel-about-mac = About Anki... 
qt-accel-cards = &Cards qt-accel-check-database = &Check Database qt-accel-check-media = Check &Media @@ -45,3 +46,4 @@ qt-accel-zoom-editor-in = Zoom Editor &In qt-accel-zoom-editor-out = Zoom Editor &Out qt-accel-create-backup = Create &Backup qt-accel-load-backup = &Revert to Backup +qt-accel-upgrade-downgrade = Upgrade/Downgrade diff --git a/ftl/qt/qt-misc.ftl b/ftl/qt/qt-misc.ftl index 294cd8a83..d7bbef990 100644 --- a/ftl/qt/qt-misc.ftl +++ b/ftl/qt/qt-misc.ftl @@ -73,6 +73,7 @@ qt-misc-second = qt-misc-layout-auto-enabled = Responsive layout enabled qt-misc-layout-vertical-enabled = Vertical layout enabled qt-misc-layout-horizontal-enabled = Horizontal layout enabled +qt-misc-open-anki-launcher = Change to a different Anki version? ## deprecated- these strings will be removed in the future, and do not need ## to be translated diff --git a/ftl/src/serialize.rs b/ftl/src/serialize.rs index c6eda559c..73513df69 100644 --- a/ftl/src/serialize.rs +++ b/ftl/src/serialize.rs @@ -435,7 +435,7 @@ impl TextWriter { item = item.trim_start_matches(' '); } - write!(self.buffer, "{}", item) + write!(self.buffer, "{item}") } fn write_char_into_indent(&mut self, ch: char) { diff --git a/ftl/src/string/mod.rs b/ftl/src/string/mod.rs index 1b64dd91c..e7bf2c5bd 100644 --- a/ftl/src/string/mod.rs +++ b/ftl/src/string/mod.rs @@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option { fn all_langs(lang_folder: &Utf8Path) -> Result> { std::fs::read_dir(lang_folder) - .with_context(|| format!("reading {:?}", lang_folder))? + .with_context(|| format!("reading {lang_folder:?}"))? .filter_map(Result::ok) .map(|e| Ok(e.path().utf8()?)) .collect() diff --git a/ninja b/ninja index 5feee474b..c44f8c330 100755 --- a/ninja +++ b/ninja @@ -8,7 +8,7 @@ else out="$BUILD_ROOT" fi export CARGO_TARGET_DIR=$out/rust -export RECONFIGURE_KEY="${MAC_X86};${SOURCEMAP};${HMR}" +export RECONFIGURE_KEY="${MAC_X86};${LIN_ARM64};${SOURCEMAP};${HMR}" if [ "$SKIP_RUNNER_BUILD" = "1" ]; then echo "Runner not rebuilt." 
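The RECONFIGURE_KEY export in the ninja wrapper above folds the build-affecting environment variables (now including LIN_ARM64) into one string, presumably so that flipping any of them invalidates the previous configure step. A rough Python sketch of that pattern, for illustration only; the stamp-file path and helper names below are invented and are not part of Anki's build system:

```python
import os

# Variables watched by the sketch; names taken from the ninja wrapper above.
WATCHED_VARS = ["MAC_X86", "LIN_ARM64", "SOURCEMAP", "HMR"]
STAMP_FILE = "out/.reconfigure-key"  # made-up path for this sketch


def current_key() -> str:
    # Join the watched variables into a single fingerprint string.
    return ";".join(os.environ.get(var, "") for var in WATCHED_VARS)


def needs_reconfigure() -> bool:
    # Reconfigure when the fingerprint differs from the recorded one,
    # or when no fingerprint has been recorded yet.
    try:
        with open(STAMP_FILE, encoding="utf8") as f:
            return f.read() != current_key()
    except FileNotFoundError:
        return True


def record_key() -> None:
    os.makedirs(os.path.dirname(STAMP_FILE), exist_ok=True)
    with open(STAMP_FILE, "w", encoding="utf8") as f:
        f.write(current_key())


if needs_reconfigure():
    print("environment changed; reconfigure needed")
    record_key()
```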
diff --git a/package.json b/package.json index d08655bad..9f12133db 100644 --- a/package.json +++ b/package.json @@ -19,8 +19,8 @@ "@poppanator/sveltekit-svg": "^5.0.0", "@sqltools/formatter": "^1.2.2", "@sveltejs/adapter-static": "^3.0.0", - "@sveltejs/kit": "^2.20.7", - "@sveltejs/vite-plugin-svelte": "4.0.0", + "@sveltejs/kit": "^2.22.2", + "@sveltejs/vite-plugin-svelte": "5.1", "@types/bootstrap": "^5.0.12", "@types/codemirror": "^5.60.0", "@types/d3": "^7.0.0", @@ -30,7 +30,7 @@ "@types/jqueryui": "^1.12.13", "@types/lodash-es": "^4.17.4", "@types/marked": "^5.0.0", - "@types/node": "^20", + "@types/node": "^22", "@typescript-eslint/eslint-plugin": "^5.60.1", "@typescript-eslint/parser": "^5.60.1", "caniuse-lite": "^1.0.30001431", @@ -48,16 +48,16 @@ "prettier": "^3.4.2", "prettier-plugin-svelte": "^3.3.2", "sass": "<1.77", - "svelte": "^5.17.3", - "svelte-check": "^3.4.4", - "svelte-preprocess": "^5.0.4", + "svelte": "^5.34.9", + "svelte-check": "^4.2.2", + "svelte-preprocess": "^6.0.3", "svelte-preprocess-esbuild": "^3.0.1", "svgo": "^3.2.0", "tslib": "^2.0.3", - "tsx": "^3.12.0", + "tsx": "^4.8.1", "typescript": "^5.0.4", - "vite": "5.4.19", - "vitest": "^2" + "vite": "6", + "vitest": "^3" }, "dependencies": { "@bufbuild/protobuf": "^1.2.1", @@ -81,7 +81,8 @@ }, "resolutions": { "canvas": "npm:empty-npm-package@1.0.0", - "cookie": "0.7.0" + "cookie": "0.7.0", + "vite": "6" }, "browserslist": [ "defaults", diff --git a/proto/anki/config.proto b/proto/anki/config.proto index d61f139d6..ea115f0fc 100644 --- a/proto/anki/config.proto +++ b/proto/anki/config.proto @@ -56,6 +56,7 @@ message ConfigKey { RENDER_LATEX = 25; LOAD_BALANCER_ENABLED = 26; FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27; + FSRS_LEGACY_EVALUATE = 28; } enum String { SET_DUE_BROWSER = 0; diff --git a/proto/anki/deck_config.proto b/proto/anki/deck_config.proto index 831283931..9dae49c6a 100644 --- a/proto/anki/deck_config.proto +++ b/proto/anki/deck_config.proto @@ -236,6 +236,7 @@ message DeckConfigsForUpdate { bool new_cards_ignore_review_limit = 7; bool fsrs = 8; bool fsrs_health_check = 11; + bool fsrs_legacy_evaluate = 12; bool apply_all_parent_limits = 9; uint32 days_since_last_fsrs_optimize = 10; } diff --git a/proto/anki/scheduler.proto b/proto/anki/scheduler.proto index ea483d3db..01f092a39 100644 --- a/proto/anki/scheduler.proto +++ b/proto/anki/scheduler.proto @@ -56,6 +56,8 @@ service SchedulerService { rpc SimulateFsrsReview(SimulateFsrsReviewRequest) returns (SimulateFsrsReviewResponse); rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse); + rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest) + returns (EvaluateParamsResponse); rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse); // The number of days the calculated interval was fuzzed by on the previous // review (if any). Utilized by the FSRS add-on. 
@@ -442,6 +444,12 @@ message EvaluateParamsRequest { uint32 num_of_relearning_steps = 3; } +message EvaluateParamsLegacyRequest { + repeated float params = 1; + string search = 2; + int64 ignore_revlogs_before_ms = 3; +} + message EvaluateParamsResponse { float log_loss = 1; float rmse_bins = 2; @@ -450,6 +458,7 @@ message EvaluateParamsResponse { message ComputeMemoryStateResponse { optional cards.FsrsMemoryState state = 1; float desired_retention = 2; + float decay = 3; } message FuzzDeltaRequest { diff --git a/pylib/anki/_backend.py b/pylib/anki/_backend.py index a0d8f8949..03fbb30d6 100644 --- a/pylib/anki/_backend.py +++ b/pylib/anki/_backend.py @@ -46,7 +46,6 @@ from .errors import ( # the following comment is required to suppress a warning that only shows up # when there are other pylint failures -# pylint: disable=c-extension-no-member if _rsbridge.buildhash() != anki.buildinfo.buildhash: raise Exception( f"""rsbridge and anki build hashes do not match: @@ -164,7 +163,7 @@ class RustBackend(RustBackendGenerated): finally: elapsed = time.time() - start if current_thread() is main_thread() and elapsed > 0.2: - print(f"blocked main thread for {int(elapsed*1000)}ms:") + print(f"blocked main thread for {int(elapsed * 1000)}ms:") print("".join(traceback.format_stack())) err = backend_pb2.BackendError() diff --git a/pylib/anki/cards.py b/pylib/anki/cards.py index 093776ebb..02807ae73 100644 --- a/pylib/anki/cards.py +++ b/pylib/anki/cards.py @@ -7,7 +7,7 @@ import pprint import time from typing import NewType -import anki # pylint: disable=unused-import +import anki import anki.collection import anki.decks import anki.notes diff --git a/pylib/anki/collection.py b/pylib/anki/collection.py index 17ee08e2f..c64ffdb8b 100644 --- a/pylib/anki/collection.py +++ b/pylib/anki/collection.py @@ -122,6 +122,7 @@ class ComputedMemoryState: desired_retention: float stability: float | None = None difficulty: float | None = None + decay: float | None = None @dataclass @@ -157,7 +158,7 @@ class Collection(DeprecatedNamesMixin): self.tags = TagManager(self) self.conf = ConfigManager(self) self._load_scheduler() - self._startReps = 0 # pylint: disable=invalid-name + self._startReps = 0 def name(self) -> Any: return os.path.splitext(os.path.basename(self.path))[0] @@ -510,9 +511,7 @@ class Collection(DeprecatedNamesMixin): # Utils ########################################################################## - def nextID( # pylint: disable=invalid-name - self, type: str, inc: bool = True - ) -> Any: + def nextID(self, type: str, inc: bool = True) -> Any: type = f"next{type.capitalize()}" id = self.conf.get(type, 1) if inc: @@ -848,7 +847,6 @@ class Collection(DeprecatedNamesMixin): ) def _pb_search_separator(self, operator: SearchJoiner) -> SearchNode.Group.Joiner.V: - # pylint: disable=no-member if operator == "AND": return SearchNode.Group.Joiner.AND else: @@ -866,7 +864,9 @@ class Collection(DeprecatedNamesMixin): return column return None - def browser_row_for_id(self, id_: int) -> tuple[ + def browser_row_for_id( + self, id_: int + ) -> tuple[ Generator[tuple[str, bool, BrowserRow.Cell.TextElideMode.V], None, None], BrowserRow.Color.V, str, @@ -1189,9 +1189,13 @@ class Collection(DeprecatedNamesMixin): desired_retention=resp.desired_retention, stability=resp.state.stability, difficulty=resp.state.difficulty, + decay=resp.decay, ) else: - return ComputedMemoryState(desired_retention=resp.desired_retention) + return ComputedMemoryState( + desired_retention=resp.desired_retention, + decay=resp.decay, + ) def 
fuzz_delta(self, card_id: CardId, interval: int) -> int: "The delta days of fuzz applied if reviewing the card in v3." @@ -1207,8 +1211,6 @@ class Collection(DeprecatedNamesMixin): # the count on things like edits, which we probably could do by checking # the previous state in moveToState. - # pylint: disable=invalid-name - def startTimebox(self) -> None: self._startTime = time.time() self._startReps = self.sched.reps diff --git a/pylib/anki/exporting.py b/pylib/anki/exporting.py index 43713d8b2..ef6f02c63 100644 --- a/pylib/anki/exporting.py +++ b/pylib/anki/exporting.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations @@ -351,7 +350,7 @@ class AnkiPackageExporter(AnkiExporter): colfile = path.replace(".apkg", ".anki2") AnkiExporter.exportInto(self, colfile) # prevent older clients from accessing - # pylint: disable=unreachable + self._addDummyCollection(z) z.write(colfile, "collection.anki21") diff --git a/pylib/anki/find.py b/pylib/anki/find.py index bcae6e556..106bf2876 100644 --- a/pylib/anki/find.py +++ b/pylib/anki/find.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/foreign_data/__init__.py b/pylib/anki/foreign_data/__init__.py index afcaf685e..8aac2cc42 100644 --- a/pylib/anki/foreign_data/__init__.py +++ b/pylib/anki/foreign_data/__init__.py @@ -1,8 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -"""Helpers for serializing third-party collections to a common JSON form. -""" +"""Helpers for serializing third-party collections to a common JSON form.""" from __future__ import annotations diff --git a/pylib/anki/hooks.py b/pylib/anki/hooks.py index fcc3758f4..13148c649 100644 --- a/pylib/anki/hooks.py +++ b/pylib/anki/hooks.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name """ Tools for extending Anki. 
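The collection.py hunk above threads the backend's new decay value through ComputedMemoryState in both branches of compute_memory_state(). A self-contained sketch of that mapping, for illustration only: the ComputedMemoryState fields mirror the hunk, while FakeMemoryState and FakeResponse are invented stand-ins for the real protobuf response types.

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class ComputedMemoryState:
    # Field names mirror the collection.py hunk above.
    desired_retention: float
    stability: float | None = None
    difficulty: float | None = None
    decay: float | None = None


# Invented stand-ins for the backend response, so this sketch runs on its own.
@dataclass
class FakeMemoryState:
    stability: float
    difficulty: float


@dataclass
class FakeResponse:
    desired_retention: float
    decay: float
    state: FakeMemoryState | None = None


def to_computed(resp: FakeResponse) -> ComputedMemoryState:
    # Mirrors the branching shown in the hunk: decay is now returned in both
    # branches, with or without a stored memory state.
    if resp.state is not None:
        return ComputedMemoryState(
            desired_retention=resp.desired_retention,
            stability=resp.state.stability,
            difficulty=resp.state.difficulty,
            decay=resp.decay,
        )
    return ComputedMemoryState(desired_retention=resp.desired_retention, decay=resp.decay)


print(to_computed(FakeResponse(desired_retention=0.9, decay=0.5)))
```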
diff --git a/pylib/anki/importing/__init__.py b/pylib/anki/importing/__init__.py index cfc2cac3f..d4fccc643 100644 --- a/pylib/anki/importing/__init__.py +++ b/pylib/anki/importing/__init__.py @@ -11,8 +11,6 @@ from anki.importing.apkg import AnkiPackageImporter from anki.importing.base import Importer from anki.importing.csvfile import TextImporter from anki.importing.mnemo import MnemosyneImporter -from anki.importing.pauker import PaukerImporter -from anki.importing.supermemo_xml import SupermemoXmlImporter # type: ignore from anki.lang import TR @@ -24,8 +22,6 @@ def importers(col: Collection) -> Sequence[tuple[str, type[Importer]]]: AnkiPackageImporter, ), (col.tr.importing_mnemosyne_20_deck_db(), MnemosyneImporter), - (col.tr.importing_supermemo_xml_export_xml(), SupermemoXmlImporter), - (col.tr.importing_pauker_18_lesson_paugz(), PaukerImporter), ] anki.hooks.importing_importers(importers) return importers diff --git a/pylib/anki/importing/anki2.py b/pylib/anki/importing/anki2.py index 098265c3f..dcfa15c8d 100644 --- a/pylib/anki/importing/anki2.py +++ b/pylib/anki/importing/anki2.py @@ -1,7 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations import os diff --git a/pylib/anki/importing/apkg.py b/pylib/anki/importing/apkg.py index ea2325960..012686ffa 100644 --- a/pylib/anki/importing/apkg.py +++ b/pylib/anki/importing/apkg.py @@ -1,7 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations import json diff --git a/pylib/anki/importing/base.py b/pylib/anki/importing/base.py index 2ddcaaebf..fc27dc909 100644 --- a/pylib/anki/importing/base.py +++ b/pylib/anki/importing/base.py @@ -1,7 +1,7 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name + from __future__ import annotations from typing import Any diff --git a/pylib/anki/importing/csvfile.py b/pylib/anki/importing/csvfile.py index 6a2ed347c..fde7ec8ac 100644 --- a/pylib/anki/importing/csvfile.py +++ b/pylib/anki/importing/csvfile.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations @@ -144,7 +143,6 @@ class TextImporter(NoteImporter): self.close() zuper = super() if hasattr(zuper, "__del__"): - # pylint: disable=no-member zuper.__del__(self) # type: ignore def noteFromFields(self, fields: list[str]) -> ForeignNote: diff --git a/pylib/anki/importing/mnemo.py b/pylib/anki/importing/mnemo.py index 5b7fda65f..a2f68ad4c 100644 --- a/pylib/anki/importing/mnemo.py +++ b/pylib/anki/importing/mnemo.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name import re import time @@ -35,7 +34,6 @@ f._id=d._fact_id""" ): if id != curid: if note: - # pylint: disable=unsubscriptable-object notes[note["_id"]] = note note = {"_id": _id} curid = id @@ -185,7 +183,6 @@ acq_reps+ret_reps, lapses, card_type_id from cards""" state = dict(n=1) def repl(match): - # pylint: disable=cell-var-from-loop # replace [...] 
with cloze refs res = "{{c%d::%s}}" % (state["n"], match.group(1)) state["n"] += 1 diff --git a/pylib/anki/importing/noteimp.py b/pylib/anki/importing/noteimp.py index f69696ef8..cb35a373a 100644 --- a/pylib/anki/importing/noteimp.py +++ b/pylib/anki/importing/noteimp.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations @@ -167,9 +166,9 @@ class NoteImporter(Importer): firsts[fld0] = True # already exists? found = False - if csum in csums: + if csum in csums: # type: ignore[comparison-overlap] # csum is not a guarantee; have to check - for id in csums[csum]: + for id in csums[csum]: # type: ignore[index] flds = self.col.db.scalar("select flds from notes where id = ?", id) sflds = split_fields(flds) if fld0 == sflds[0]: diff --git a/pylib/anki/importing/pauker.py b/pylib/anki/importing/pauker.py deleted file mode 100644 index ea5c45082..000000000 --- a/pylib/anki/importing/pauker.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright: Andreas Klauer -# License: BSD-3 - -# pylint: disable=invalid-name - -import gzip -import html -import math -import random -import time -import xml.etree.ElementTree as ET - -from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter -from anki.stdmodels import _legacy_add_forward_reverse - -ONE_DAY = 60 * 60 * 24 - - -class PaukerImporter(NoteImporter): - """Import Pauker 1.8 Lesson (*.pau.gz)""" - - needMapper = False - allowHTML = True - - def run(self): - model = _legacy_add_forward_reverse(self.col) - model["name"] = "Pauker" - self.col.models.save(model, updateReqs=False) - self.col.models.set_current(model) - self.model = model - self.initMapping() - NoteImporter.run(self) - - def fields(self): - """Pauker is Front/Back""" - return 2 - - def foreignNotes(self): - """Build and return a list of notes.""" - notes = [] - - try: - f = gzip.open(self.file) - tree = ET.parse(f) # type: ignore - lesson = tree.getroot() - assert lesson.tag == "Lesson" - finally: - f.close() - - index = -4 - - for batch in lesson.findall("./Batch"): - index += 1 - - for card in batch.findall("./Card"): - # Create a note for this card. - front = card.findtext("./FrontSide/Text") - back = card.findtext("./ReverseSide/Text") - note = ForeignNote() - assert front and back - note.fields = [ - html.escape(x.strip()) - .replace("\n", "
") - .replace(" ", "  ") - for x in [front, back] - ] - notes.append(note) - - # Determine due date for cards. - frontdue = card.find("./FrontSide[@LearnedTimestamp]") - backdue = card.find("./ReverseSide[@Batch][@LearnedTimestamp]") - - if frontdue is not None: - note.cards[0] = self._learnedCard( - index, int(frontdue.attrib["LearnedTimestamp"]) - ) - - if backdue is not None: - note.cards[1] = self._learnedCard( - int(backdue.attrib["Batch"]), - int(backdue.attrib["LearnedTimestamp"]), - ) - - return notes - - def _learnedCard(self, batch, timestamp): - ivl = math.exp(batch) - now = time.time() - due = ivl - (now - timestamp / 1000.0) / ONE_DAY - fc = ForeignCard() - fc.due = self.col.sched.today + int(due + 0.5) - fc.ivl = random.randint(int(ivl * 0.90), int(ivl + 0.5)) - fc.factor = random.randint(1500, 2500) - return fc diff --git a/pylib/anki/importing/supermemo_xml.py b/pylib/anki/importing/supermemo_xml.py deleted file mode 100644 index 202592c2e..000000000 --- a/pylib/anki/importing/supermemo_xml.py +++ /dev/null @@ -1,484 +0,0 @@ -# Copyright: petr.michalec@gmail.com -# License: GNU GPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pytype: disable=attribute-error -# type: ignore -# pylint: disable=C -from __future__ import annotations - -import re -import sys -import time -import unicodedata -from string import capwords -from xml.dom import minidom -from xml.dom.minidom import Element, Text - -from anki.collection import Collection -from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter -from anki.stdmodels import _legacy_add_basic_model - - -class SmartDict(dict): - """ - See http://www.peterbe.com/plog/SmartDict - Copyright 2005, Peter Bengtsson, peter@fry-it.com - 0BSD - - A smart dict can be instantiated either from a pythonic dict - or an instance object (eg. SQL recordsets) but it ensures that you can - do all the convenient lookups such as x.first_name, x['first_name'] or - x.get('first_name'). - """ - - def __init__(self, *a, **kw) -> None: - if a: - if isinstance(type(a[0]), dict): - kw.update(a[0]) - elif isinstance(type(a[0]), object): - kw.update(a[0].__dict__) - elif hasattr(a[0], "__class__") and a[0].__class__.__name__ == "SmartDict": - kw.update(a[0].__dict__) - - dict.__init__(self, **kw) - self.__dict__ = self - - -class SuperMemoElement(SmartDict): - "SmartDict wrapper to store SM Element data" - - def __init__(self, *a, **kw) -> None: - SmartDict.__init__(self, *a, **kw) - # default content - self.__dict__["lTitle"] = None - self.__dict__["Title"] = None - self.__dict__["Question"] = None - self.__dict__["Answer"] = None - self.__dict__["Count"] = None - self.__dict__["Type"] = None - self.__dict__["ID"] = None - self.__dict__["Interval"] = None - self.__dict__["Lapses"] = None - self.__dict__["Repetitions"] = None - self.__dict__["LastRepetiton"] = None - self.__dict__["AFactor"] = None - self.__dict__["UFactor"] = None - - -# This is an AnkiImporter -class SupermemoXmlImporter(NoteImporter): - needMapper = False - allowHTML = True - - """ - Supermemo XML export's to Anki parser. - Goes through a SM collection and fetch all elements. - - My SM collection was a big mess where topics and items were mixed. - I was unable to parse my content in a regular way like for loop on - minidom.getElementsByTagName() etc. My collection had also an - limitation, topics were splited into branches with max 100 items - on each. Learning themes were in deep structure. I wanted to have - full title on each element to be stored in tags. 
- - Code should be upgrade to support importing of SM2006 exports. - """ - - def __init__(self, col: Collection, file: str) -> None: - """Initialize internal variables. - Pameters to be exposed to GUI are stored in self.META""" - NoteImporter.__init__(self, col, file) - m = _legacy_add_basic_model(self.col) - m["name"] = "Supermemo" - self.col.models.save(m) - self.initMapping() - - self.lines = None - self.numFields = int(2) - - # SmXmlParse VARIABLES - self.xmldoc = None - self.pieces = [] - self.cntBuf = [] # to store last parsed data - self.cntElm = [] # to store SM Elements data - self.cntCol = [] # to store SM Colections data - - # store some meta info related to parse algorithm - # SmartDict works like dict / class wrapper - self.cntMeta = SmartDict() - self.cntMeta.popTitles = False - self.cntMeta.title = [] - - # META stores controls of import script, should be - # exposed to import dialog. These are default values. - self.META = SmartDict() - self.META.resetLearningData = False # implemented - self.META.onlyMemorizedItems = False # implemented - self.META.loggerLevel = 2 # implemented 0no,1info,2error,3debug - self.META.tagAllTopics = True - self.META.pathsToBeTagged = [ - "English for beginners", - "Advanced English 97", - "Phrasal Verbs", - ] # path patterns to be tagged - in gui entered like 'Advanced English 97|My Vocablary' - self.META.tagMemorizedItems = True # implemented - self.META.logToStdOutput = False # implemented - - self.notes = [] - - ## TOOLS - - def _fudgeText(self, text: str) -> str: - "Replace sm syntax to Anki syntax" - text = text.replace("\n\r", "
") - text = text.replace("\n", "
") - return text - - def _unicode2ascii(self, str: str) -> str: - "Remove diacritic punctuation from strings (titles)" - return "".join( - [ - c - for c in unicodedata.normalize("NFKD", str) - if not unicodedata.combining(c) - ] - ) - - def _decode_htmlescapes(self, html: str) -> str: - """Unescape HTML code.""" - # In case of bad formatted html you can import MinimalSoup etc.. see BeautifulSoup source code - from bs4 import BeautifulSoup - - # my sm2004 also ecaped & char in escaped sequences. - html = re.sub("&", "&", html) - - # https://anki.tenderapp.com/discussions/ankidesktop/39543-anki-is-replacing-the-character-by-when-i-exit-the-html-edit-mode-ctrlshiftx - if html.find(">") < 0: - return html - - # unescaped solitary chars < or > that were ok for minidom confuse btfl soup - # html = re.sub(u'>',u'>',html) - # html = re.sub(u'<',u'<',html) - - return str(BeautifulSoup(html, "html.parser")) - - def _afactor2efactor(self, af: float) -> float: - # Adapted from - - # Ranges for A-factors and E-factors - af_min = 1.2 - af_max = 6.9 - ef_min = 1.3 - ef_max = 3.3 - - # Sanity checks for the A-factor - if af < af_min: - af = af_min - elif af > af_max: - af = af_max - - # Scale af to the range 0..1 - af_scaled = (af - af_min) / (af_max - af_min) - # Rescale to the interval ef_min..ef_max - ef = ef_min + af_scaled * (ef_max - ef_min) - - return ef - - ## DEFAULT IMPORTER METHODS - - def foreignNotes(self) -> list[ForeignNote]: - # Load file and parse it by minidom - self.loadSource(self.file) - - # Migrating content / time consuming part - # addItemToCards is called for each sm element - self.logger("Parsing started.") - self.parse() - self.logger("Parsing done.") - - # Return imported cards - self.total = len(self.notes) - self.log.append("%d cards imported." 
% self.total) - return self.notes - - def fields(self) -> int: - return 2 - - ## PARSER METHODS - - def addItemToCards(self, item: SuperMemoElement) -> None: - "This method actually do conversion" - - # new anki card - note = ForeignNote() - - # clean Q and A - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Question))) - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Answer))) - note.tags = [] - - # pre-process scheduling data - # convert learning data - if ( - not self.META.resetLearningData - and int(item.Interval) >= 1 - and getattr(item, "LastRepetition", None) - ): - # migration of LearningData algorithm - tLastrep = time.mktime(time.strptime(item.LastRepetition, "%d.%m.%Y")) - tToday = time.time() - card = ForeignCard() - card.ivl = int(item.Interval) - card.lapses = int(item.Lapses) - card.reps = int(item.Repetitions) + int(item.Lapses) - nextDue = tLastrep + (float(item.Interval) * 86400.0) - remDays = int((nextDue - time.time()) / 86400) - card.due = self.col.sched.today + remDays - card.factor = int( - self._afactor2efactor(float(item.AFactor.replace(",", "."))) * 1000 - ) - note.cards[0] = card - - # categories & tags - # it's worth to have every theme (tree structure of sm collection) stored in tags, but sometimes not - # you can deceide if you are going to tag all toppics or just that containing some pattern - tTaggTitle = False - for pattern in self.META.pathsToBeTagged: - if ( - item.lTitle is not None - and pattern.lower() in " ".join(item.lTitle).lower() - ): - tTaggTitle = True - break - if tTaggTitle or self.META.tagAllTopics: - # normalize - remove diacritic punctuation from unicode chars to ascii - item.lTitle = [self._unicode2ascii(topic) for topic in item.lTitle] - - # Transform xyz / aaa / bbb / ccc on Title path to Tag xyzAaaBbbCcc - # clean things like [999] or [111-2222] from title path, example: xyz / [1000-1200] zyx / xyz - # clean whitespaces - # set Capital letters for first char of the word - tmp = list( - {re.sub(r"(\[[0-9]+\])", " ", i).replace("_", " ") for i in item.lTitle} - ) - tmp = list({re.sub(r"(\W)", " ", i) for i in tmp}) - tmp = list({re.sub("^[0-9 ]+$", "", i) for i in tmp}) - tmp = list({capwords(i).replace(" ", "") for i in tmp}) - tags = [j[0].lower() + j[1:] for j in tmp if j.strip() != ""] - - note.tags += tags - - if self.META.tagMemorizedItems and int(item.Interval) > 0: - note.tags.append("Memorized") - - self.logger("Element tags\t- " + repr(note.tags), level=3) - - self.notes.append(note) - - def logger(self, text: str, level: int = 1) -> None: - "Wrapper for Anki logger" - - dLevels = {0: "", 1: "Info", 2: "Verbose", 3: "Debug"} - if level <= self.META.loggerLevel: - # self.deck.updateProgress(_(text)) - - if self.META.logToStdOutput: - print( - self.__class__.__name__ - + " - " - + dLevels[level].ljust(9) - + " -\t" - + text - ) - - # OPEN AND LOAD - def openAnything(self, source): - """Open any source / actually only opening of files is used - @return an open handle which must be closed after use, i.e., handle.close()""" - - if source == "-": - return sys.stdin - - # try to open with urllib (if source is http, ftp, or file URL) - import urllib.error - import urllib.parse - import urllib.request - - try: - return urllib.request.urlopen(source) - except OSError: - pass - - # try to open with native open function (if source is pathname) - try: - return open(source, encoding="utf8") - except OSError: - pass - - # treat source as string - import io - - return io.StringIO(str(source)) - - def 
loadSource(self, source: str) -> None: - """Load source file and parse with xml.dom.minidom""" - self.source = source - self.logger("Load started...") - sock = open(self.source, encoding="utf8") - self.xmldoc = minidom.parse(sock).documentElement - sock.close() - self.logger("Load done.") - - # PARSE - def parse(self, node: Text | Element | None = None) -> None: - "Parse method - parses document elements" - - if node is None and self.xmldoc is not None: - node = self.xmldoc - - _method = "parse_%s" % node.__class__.__name__ - if hasattr(self, _method): - parseMethod = getattr(self, _method) - parseMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - - def parse_Document(self, node): - "Parse XML document" - - self.parse(node.documentElement) - - def parse_Element(self, node: Element) -> None: - "Parse XML element" - - _method = "do_%s" % node.tagName - if hasattr(self, _method): - handlerMethod = getattr(self, _method) - handlerMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - # print traceback.print_exc() - - def parse_Text(self, node: Text) -> None: - "Parse text inside elements. Text is stored into local buffer." - - text = node.data - self.cntBuf.append(text) - - # def parse_Comment(self, node): - # """ - # Source can contain XML comments, but we ignore them - # """ - # pass - - # DO - def do_SuperMemoCollection(self, node: Element) -> None: - "Process SM Collection" - - for child in node.childNodes: - self.parse(child) - - def do_SuperMemoElement(self, node: Element) -> None: - "Process SM Element (Type - Title,Topics)" - - self.logger("=" * 45, level=3) - - self.cntElm.append(SuperMemoElement()) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - - # parse all child elements - for child in node.childNodes: - self.parse(child) - - # strip all saved strings, just for sure - for key in list(self.cntElm[-1].keys()): - if hasattr(self.cntElm[-1][key], "strip"): - self.cntElm[-1][key] = self.cntElm[-1][key].strip() - - # pop current element - smel = self.cntElm.pop() - - # Process cntElm if is valid Item (and not an Topic etc..) 
- # if smel.Lapses != None and smel.Interval != None and smel.Question != None and smel.Answer != None: - if smel.Title is None and smel.Question is not None and smel.Answer is not None: - if smel.Answer.strip() != "" and smel.Question.strip() != "": - # migrate only memorized otherway skip/continue - if self.META.onlyMemorizedItems and not (int(smel.Interval) > 0): - self.logger("Element skipped \t- not memorized ...", level=3) - else: - # import sm element data to Anki - self.addItemToCards(smel) - self.logger("Import element \t- " + smel["Question"], level=3) - - # print element - self.logger("-" * 45, level=3) - for key in list(smel.keys()): - self.logger( - "\t{} {}".format((key + ":").ljust(15), smel[key]), level=3 - ) - else: - self.logger("Element skipped \t- no valid Q and A ...", level=3) - - else: - # now we know that item was topic - # parsing of whole node is now finished - - # test if it's really topic - if smel.Title is not None: - # remove topic from title list - t = self.cntMeta["title"].pop() - self.logger("End of topic \t- %s" % (t), level=2) - - def do_Content(self, node: Element) -> None: - "Process SM element Content" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - def do_LearningData(self, node: Element) -> None: - "Process SM element LearningData" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - # It's being processed in do_Content now - # def do_Question(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - # It's being processed in do_Content now - # def do_Answer(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - def do_Title(self, node: Element) -> None: - "Process SM element Title" - - t = self._decode_htmlescapes(node.firstChild.data) - self.cntElm[-1][node.tagName] = t - self.cntMeta["title"].append(t) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - self.logger("Start of topic \t- " + " / ".join(self.cntMeta["title"]), level=2) - - def do_Type(self, node: Element) -> None: - "Process SM element Type" - - if len(self.cntBuf) >= 1: - self.cntElm[-1][node.tagName] = self.cntBuf.pop() - - -# if __name__ == '__main__': - -# for testing you can start it standalone - -# file = u'/home/epcim/hg2g/dev/python/sm2anki/ADVENG2EXP.xxe.esc.zaloha_FINAL.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_noOEM.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_oem_1250.xml' -# file = str(sys.argv[1]) -# impo = SupermemoXmlImporter(Deck(),file) -# impo.foreignCards() - -# sys.exit(1) - -# vim: ts=4 sts=2 ft=python diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py index ca61f28bd..3cbb60319 100644 --- a/pylib/anki/lang.py +++ b/pylib/anki/lang.py @@ -157,13 +157,13 @@ def lang_to_disk_lang(lang: str) -> str: # the currently set interface language -current_lang = "en" # pylint: disable=invalid-name +current_lang = "en" # the current Fluent translation instance. Code in pylib/ should # not reference this, and should use col.tr instead. The global # instance exists for legacy reasons, and as a convenience for the # Qt code. 
-current_i18n: anki._backend.RustBackend | None = None # pylint: disable=invalid-name +current_i18n: anki._backend.RustBackend | None = None tr_legacyglobal = anki._backend.Translations(None) @@ -178,7 +178,7 @@ def ngettext(single: str, plural: str, num: int) -> str: def set_lang(lang: str) -> None: - global current_lang, current_i18n # pylint: disable=invalid-name + global current_lang, current_i18n current_lang = lang current_i18n = anki._backend.RustBackend(langs=[lang]) tr_legacyglobal.backend = weakref.ref(current_i18n) diff --git a/pylib/anki/models.py b/pylib/anki/models.py index 230084359..a2267663a 100644 --- a/pylib/anki/models.py +++ b/pylib/anki/models.py @@ -10,7 +10,7 @@ import time from collections.abc import Sequence from typing import Any, NewType, Union -import anki # pylint: disable=unused-import +import anki import anki.collection import anki.notes from anki import notetypes_pb2 @@ -419,7 +419,7 @@ and notes.mid = ? and cards.ord = ?""", # legacy API - used by unit tests and add-ons - def change( # pylint: disable=invalid-name + def change( self, notetype: NotetypeDict, nids: list[anki.notes.NoteId], @@ -478,8 +478,6 @@ and notes.mid = ? and cards.ord = ?""", # Legacy ########################################################################## - # pylint: disable=invalid-name - @deprecated(info="use note.cloze_numbers_in_fields()") def _availClozeOrds( self, notetype: NotetypeDict, flds: str, allow_empty: bool = True diff --git a/pylib/anki/notes.py b/pylib/anki/notes.py index 5de95bfb6..3d09d5632 100644 --- a/pylib/anki/notes.py +++ b/pylib/anki/notes.py @@ -7,7 +7,7 @@ import copy from collections.abc import Sequence from typing import NewType -import anki # pylint: disable=unused-import +import anki import anki.cards import anki.collection import anki.decks diff --git a/pylib/anki/rsbackend.py b/pylib/anki/rsbackend.py index 297c1b6d2..093712fca 100644 --- a/pylib/anki/rsbackend.py +++ b/pylib/anki/rsbackend.py @@ -4,10 +4,8 @@ # The backend code has moved into _backend; this file exists only to avoid breaking # some add-ons. They should be updated to point to the correct location in the # future. -# -# pylint: disable=unused-import -# pylint: enable=invalid-name +# ruff: noqa: F401 from anki.decks import DeckTreeNode from anki.errors import InvalidInput, NotFoundError from anki.lang import TR diff --git a/pylib/anki/scheduler/base.py b/pylib/anki/scheduler/base.py index 83ef9d393..ffe4a6ef9 100644 --- a/pylib/anki/scheduler/base.py +++ b/pylib/anki/scheduler/base.py @@ -42,6 +42,7 @@ from anki.utils import ids2str, int_time class SchedulerBase(DeprecatedNamesMixin): "Actions shared between schedulers." 
+ version = 0 def __init__(self, col: anki.collection.Collection) -> None: diff --git a/pylib/anki/scheduler/dummy.py b/pylib/anki/scheduler/dummy.py index 5732ad346..08896b1e5 100644 --- a/pylib/anki/scheduler/dummy.py +++ b/pylib/anki/scheduler/dummy.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/scheduler/legacy.py b/pylib/anki/scheduler/legacy.py index 58bed7933..35092588d 100644 --- a/pylib/anki/scheduler/legacy.py +++ b/pylib/anki/scheduler/legacy.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name from __future__ import annotations diff --git a/pylib/anki/scheduler/v3.py b/pylib/anki/scheduler/v3.py index 2a18ee021..3c1123d0b 100644 --- a/pylib/anki/scheduler/v3.py +++ b/pylib/anki/scheduler/v3.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=invalid-name """ The V3/2021 scheduler. @@ -184,7 +183,7 @@ class Scheduler(SchedulerBaseWithLegacy): return self._interval_for_filtered_state(state.filtered) else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def _interval_for_normal_state( self, normal: scheduler_pb2.SchedulingState.Normal @@ -200,7 +199,7 @@ class Scheduler(SchedulerBaseWithLegacy): return normal.relearning.learning.scheduled_secs else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def _interval_for_filtered_state( self, filtered: scheduler_pb2.SchedulingState.Filtered @@ -212,7 +211,7 @@ class Scheduler(SchedulerBaseWithLegacy): return self._interval_for_normal_state(filtered.rescheduling.original_state) else: assert_exhaustive(kind) - return 0 # pylint: disable=unreachable + return 0 def nextIvl(self, card: Card, ease: int) -> Any: "Don't use this - it is only required by tests, and will be moved in the future." diff --git a/pylib/anki/stats.py b/pylib/anki/stats.py index 2f7de2e04..e8045decb 100644 --- a/pylib/anki/stats.py +++ b/pylib/anki/stats.py @@ -1,7 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pylint: disable=C from __future__ import annotations @@ -27,7 +26,7 @@ def _legacy_card_stats( col: anki.collection.Collection, card_id: anki.cards.CardId, include_revlog: bool ) -> str: "A quick hack to preserve compatibility with the old HTML string API." - random_id = f"cardinfo-{base62(random.randint(0, 2 ** 64 - 1))}" + random_id = f"cardinfo-{base62(random.randint(0, 2**64 - 1))}" varName = random_id.replace("-", "") return f"""
<div id="{random_id}"></div>
@@ -174,7 +173,7 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """ cards=cards, seconds=float(thetime) ) # again/pass count - b += "<br>" + "Again count: %s" % bold(failed) + b += "<br>" + "Again count: %s" % bold(str(failed)) if cards: b += " " + "(%s correct)" % bold( "%0.1f%%" % ((1 - failed / float(cards)) * 100) @@ -182,7 +181,10 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """ # type breakdown b += "<br>
" b += "Learn: %(a)s, Review: %(b)s, Relearn: %(c)s, Filtered: %(d)s" % dict( - a=bold(lrn), b=bold(rev), c=bold(relrn), d=bold(filt) + a=bold(str(lrn)), + b=bold(str(rev)), + c=bold(str(relrn)), + d=bold(str(filt)), ) # mature today mcnt, msum = self.col.db.first( @@ -321,7 +323,6 @@ group by day order by day""" yaxes=[dict(min=0), dict(position="right", min=0)], ) if days is not None: - # pylint: disable=invalid-unary-operand-type conf["xaxis"]["min"] = -days + 0.5 def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str: @@ -356,7 +357,6 @@ group by day order by day""" yaxes=[dict(min=0), dict(position="right", min=0)], ) if days is not None: - # pylint: disable=invalid-unary-operand-type conf["xaxis"]["min"] = -days + 0.5 def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str: diff --git a/pylib/anki/statsbg.py b/pylib/anki/statsbg.py index 552dfb5a9..b9ebb5aa8 100644 --- a/pylib/anki/statsbg.py +++ b/pylib/anki/statsbg.py @@ -1,5 +1,3 @@ -# pylint: disable=invalid-name - # from subtlepatterns.com; CC BY 4.0. # by Daniel Beaton # https://www.toptal.com/designers/subtlepatterns/fancy-deboss/ diff --git a/pylib/anki/stdmodels.py b/pylib/anki/stdmodels.py index 721b96bc6..4edb83a7a 100644 --- a/pylib/anki/stdmodels.py +++ b/pylib/anki/stdmodels.py @@ -12,7 +12,6 @@ from anki import notetypes_pb2 from anki._legacy import DeprecatedNamesMixinForModule from anki.utils import from_json_bytes -# pylint: disable=no-member StockNotetypeKind = notetypes_pb2.StockNotetype.Kind # add-on authors can add ("note type name", function) diff --git a/pylib/anki/tags.py b/pylib/anki/tags.py index a54aa7901..0c0338b82 100644 --- a/pylib/anki/tags.py +++ b/pylib/anki/tags.py @@ -16,7 +16,7 @@ import re from collections.abc import Collection, Sequence from typing import Match -import anki # pylint: disable=unused-import +import anki import anki.collection from anki import tags_pb2 from anki._legacy import DeprecatedNamesMixin, deprecated diff --git a/pylib/anki/template.py b/pylib/anki/template.py index 118a23c6b..bc507f0f6 100644 --- a/pylib/anki/template.py +++ b/pylib/anki/template.py @@ -279,6 +279,7 @@ class TemplateRenderContext: @dataclass class TemplateRenderOutput: "Stores the rendered templates and extracted AV tags." + question_text: str answer_text: str question_av_tags: list[AVTag] diff --git a/pylib/anki/utils.py b/pylib/anki/utils.py index 1b4212620..60ae75507 100644 --- a/pylib/anki/utils.py +++ b/pylib/anki/utils.py @@ -24,7 +24,6 @@ from anki.dbproxy import DBProxy _tmpdir: str | None try: - # pylint: disable=c-extension-no-member import orjson to_json_bytes: Callable[[Any], bytes] = orjson.dumps @@ -156,12 +155,12 @@ def field_checksum(data: str) -> int: # Temp files ############################################################################## -_tmpdir = None # pylint: disable=invalid-name +_tmpdir = None def tmpdir() -> str: "A reusable temp folder which we clean out on each program invocation." 
- global _tmpdir # pylint: disable=invalid-name + global _tmpdir if not _tmpdir: def cleanup() -> None: @@ -216,7 +215,6 @@ def call(argv: list[str], wait: bool = True, **kwargs: Any) -> int: try: info.dwFlags |= subprocess.STARTF_USESHOWWINDOW # type: ignore except Exception: - # pylint: disable=no-member info.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW # type: ignore else: info = None @@ -244,8 +242,8 @@ def call(argv: list[str], wait: bool = True, **kwargs: Any) -> int: # OS helpers ############################################################################## -is_mac = sys.platform.startswith("darwin") -is_win = sys.platform.startswith("win32") +is_mac = sys.platform == "darwin" +is_win = sys.platform == "win32" # also covers *BSD is_lin = not is_mac and not is_win is_gnome = ( @@ -282,7 +280,7 @@ def plat_desc() -> str: elif is_win: theos = f"win:{platform.win32_ver()[0]}" elif system == "Linux": - import distro # pytype: disable=import-error # pylint: disable=import-error + import distro # pytype: disable=import-error dist_id = distro.id() dist_version = distro.version() @@ -309,12 +307,17 @@ def int_version() -> int: """Anki's version as an integer in the form YYMMPP, e.g. 230900. (year, month, patch). In 2.1.x releases, this was just the last number.""" + import re + from anki.buildinfo import version + # Strip non-numeric characters (handles beta/rc suffixes like '25.02b1' or 'rc3') + numeric_version = re.sub(r"[^0-9.]", "", version) + try: - [year, month, patch] = version.split(".") + [year, month, patch] = numeric_version.split(".") except ValueError: - [year, month] = version.split(".") + [year, month] = numeric_version.split(".") patch = "0" year_num = int(year) diff --git a/pylib/hatch_build.py b/pylib/hatch_build.py new file mode 100644 index 000000000..9e8ee9799 --- /dev/null +++ b/pylib/hatch_build.py @@ -0,0 +1,50 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +import os +import platform +import sys +from pathlib import Path +from typing import Any, Dict + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomBuildHook(BuildHookInterface): + """Build hook to include compiled rsbridge from out/pylib.""" + + PLUGIN_NAME = "custom" + + def initialize(self, version: str, build_data: Dict[str, Any]) -> None: + """Initialize the build hook.""" + force_include = build_data.setdefault("force_include", {}) + + # Set platform-specific wheel tag + if not (platform_tag := os.environ.get("ANKI_WHEEL_TAG")): + # On Windows, uv invokes this build hook during the initial uv sync, + # when the tag has not been declared by our build script. 
+ return + build_data.setdefault("tag", platform_tag) + + # Mark as non-pure Python since we include compiled extension + build_data["pure_python"] = False + + # Look for generated files in out/pylib/anki + project_root = Path(self.root).parent + generated_root = project_root / "out" / "pylib" / "anki" + + assert generated_root.exists(), "you should build with --wheel" + for path in generated_root.rglob("*"): + if path.is_file() and not self._should_exclude(path): + relative_path = path.relative_to(generated_root) + # Place files under anki/ in the distribution + dist_path = "anki" / relative_path + force_include[str(path)] = str(dist_path) + + def _should_exclude(self, path: Path) -> bool: + """Check if a file should be excluded from the wheel.""" + # Exclude __pycache__ + path_str = str(path) + if "/__pycache__/" in path_str: + return True + return False diff --git a/pylib/pyproject.toml b/pylib/pyproject.toml new file mode 100644 index 000000000..23e10077f --- /dev/null +++ b/pylib/pyproject.toml @@ -0,0 +1,30 @@ +[project] +name = "anki" +dynamic = ["version"] +requires-python = ">=3.9" +license = "AGPL-3.0-or-later" +dependencies = [ + "decorator", + "markdown", + "orjson", + "protobuf>=4.21", + "requests[socks]", + # remove after we update to min python 3.11+ + "typing_extensions", + # platform-specific dependencies + "distro; sys_platform != 'darwin' and sys_platform != 'win32'", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["anki"] + +[tool.hatch.version] +source = "code" +path = "../python/version.py" + +[tool.hatch.build.hooks.custom] +path = "hatch_build.py" diff --git a/pylib/rsbridge/Cargo.toml b/pylib/rsbridge/Cargo.toml index fbe76c8a5..22dca83fa 100644 --- a/pylib/rsbridge/Cargo.toml +++ b/pylib/rsbridge/Cargo.toml @@ -12,6 +12,7 @@ description = "Anki's Rust library code Python bindings" name = "rsbridge" crate-type = ["cdylib"] path = "lib.rs" +test = false [dependencies] anki.workspace = true diff --git a/pylib/rsbridge/build.rs b/pylib/rsbridge/build.rs index d9809be73..4671bc296 100644 --- a/pylib/rsbridge/build.rs +++ b/pylib/rsbridge/build.rs @@ -1,21 +1,33 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::path::Path; - fn main() { // macOS needs special link flags for PyO3 if cfg!(target_os = "macos") { println!("cargo:rustc-link-arg=-undefined"); println!("cargo:rustc-link-arg=dynamic_lookup"); - println!("cargo:rustc-link-arg=-mmacosx-version-min=10.13"); + println!("cargo:rustc-link-arg=-mmacosx-version-min=11"); } // On Windows, we need to be able to link with python3.lib if cfg!(windows) { - let lib_path = Path::new("../../out/extracted/python/libs") - .canonicalize() - .expect("libs"); - println!("cargo:rustc-link-search={}", lib_path.display()); + use std::process::Command; + + // Run Python to get sysconfig paths + let output = Command::new("../../out/pyenv/scripts/python") + .args([ + "-c", + "import sysconfig; print(sysconfig.get_paths()['stdlib'])", + ]) + .output() + .expect("Failed to execute Python"); + + let stdlib_path = String::from_utf8(output.stdout) + .expect("Failed to parse Python output") + .trim() + .to_string(); + + let libs_path = stdlib_path + "s"; + println!("cargo:rustc-link-search={libs_path}"); } } diff --git a/pylib/tests/test_find.py b/pylib/tests/test_find.py index d9c2c1f87..236096572 100644 --- a/pylib/tests/test_find.py +++ b/pylib/tests/test_find.py @@ 
-169,8 +169,7 @@ def test_find_cards(): # properties id = col.db.scalar("select id from cards limit 1") col.db.execute( - "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 " - "where id = ?", + "update cards set queue=2, ivl=10, reps=20, due=30, factor=2200 where id = ?", id, ) assert len(col.find_cards("prop:ivl>5")) == 1 diff --git a/pylib/tests/test_importing.py b/pylib/tests/test_importing.py index 191de51f4..b7b63de26 100644 --- a/pylib/tests/test_importing.py +++ b/pylib/tests/test_importing.py @@ -13,7 +13,6 @@ from anki.importing import ( Anki2Importer, AnkiPackageImporter, MnemosyneImporter, - SupermemoXmlImporter, TextImporter, ) from tests.shared import getEmptyCol, getUpgradeDeckPath @@ -306,22 +305,6 @@ def test_csv_tag_only_if_modified(): col.close() -@pytest.mark.filterwarnings("ignore:Using or importing the ABCs") -def test_supermemo_xml_01_unicode(): - col = getEmptyCol() - file = str(os.path.join(testDir, "support", "supermemo1.xml")) - i = SupermemoXmlImporter(col, file) - # i.META.logToStdOutput = True - i.run() - assert i.total == 1 - cid = col.db.scalar("select id from cards") - c = col.get_card(cid) - # Applies A Factor-to-E Factor conversion - assert c.factor == 2879 - assert c.reps == 7 - col.close() - - def test_mnemo(): col = getEmptyCol() file = str(os.path.join(testDir, "support", "mnemo.db")) diff --git a/pylib/tests/test_schedv3.py b/pylib/tests/test_schedv3.py index 0deff7bf9..a71fa7140 100644 --- a/pylib/tests/test_schedv3.py +++ b/pylib/tests/test_schedv3.py @@ -551,12 +551,10 @@ def test_bury(): col.addNote(note) c2 = note.cards()[0] # burying - col.sched.bury_cards([c.id], manual=True) # pylint: disable=unexpected-keyword-arg + col.sched.bury_cards([c.id], manual=True) c.load() assert c.queue == QUEUE_TYPE_MANUALLY_BURIED - col.sched.bury_cards( - [c2.id], manual=False - ) # pylint: disable=unexpected-keyword-arg + col.sched.bury_cards([c2.id], manual=False) c2.load() assert c2.queue == QUEUE_TYPE_SIBLING_BURIED diff --git a/pylib/tools/genbuildinfo.py b/pylib/tools/genbuildinfo.py index b997ca5b3..add188d41 100644 --- a/pylib/tools/genbuildinfo.py +++ b/pylib/tools/genbuildinfo.py @@ -15,6 +15,5 @@ with open(buildhash_file, "r", encoding="utf8") as f: with open(outpath, "w", encoding="utf8") as f: # if we switch to uppercase we'll need to add legacy aliases - f.write("# pylint: disable=invalid-name\n") f.write(f"version = '{version}'\n") f.write(f"buildhash = '{buildhash}'\n") diff --git a/pylib/tools/genhooks.py b/pylib/tools/genhooks.py index e0e4924be..3644e3e95 100644 --- a/pylib/tools/genhooks.py +++ b/pylib/tools/genhooks.py @@ -133,7 +133,7 @@ prefix = """\ # This file is automatically generated; edit tools/genhooks.py instead. # Please import from anki.hooks instead of this file. -# pylint: disable=unused-import +# ruff: noqa: F401 from __future__ import annotations diff --git a/pylib/tools/hookslib.py b/pylib/tools/hookslib.py index 8920cdcfc..99f08fa1e 100644 --- a/pylib/tools/hookslib.py +++ b/pylib/tools/hookslib.py @@ -7,7 +7,6 @@ Code for generating hooks. 
from __future__ import annotations -import os import subprocess import sys from dataclasses import dataclass @@ -204,9 +203,6 @@ def write_file(path: str, hooks: list[Hook], prefix: str, suffix: str): code += f"\n{suffix}" - # work around issue with latest black - if sys.platform == "win32" and "HOME" in os.environ: - os.environ["USERPROFILE"] = os.environ["HOME"] with open(path, "wb") as file: file.write(code.encode("utf8")) - subprocess.run([sys.executable, "-m", "black", "-q", path], check=True) + subprocess.run([sys.executable, "-m", "ruff", "format", "-q", path], check=True) diff --git a/pyproject.toml b/pyproject.toml index d88101a1e..7de32ec73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,43 @@ -[tool.black] -target-version = ["py39", "py310", "py311", "py312"] -extend-exclude = "qt/bundle" +[project] +name = "anki-dev" +version = "0.0.0" +description = "Local-only environment" +requires-python = ">=3.9" +classifiers = ["Private :: Do Not Upload"] -[tool.pyright] -include = ["pylib/anki", "qt/aqt"] -stubPath = "" -pythonVersion = "3.9" +[dependency-groups] +dev = [ + "mypy", + "mypy-protobuf", + "ruff", + "pytest", + "PyChromeDevTools", + "wheel", + "hatchling", # for type checking hatch_build.py files + "mock", + "types-protobuf", + "types-requests", + "types-orjson", + "types-decorator", + "types-flask", + "types-flask-cors", + "types-markdown", + "types-waitress", + "types-pywin32", +] + +[project.optional-dependencies] +sphinx = [ + "sphinx", + "sphinx_rtd_theme", + "sphinx-autoapi", +] + +[tool.uv.workspace] +members = ["pylib", "qt"] + +[[tool.uv.index]] +name = "testpypi" +url = "https://test.pypi.org/simple/" +publish-url = "https://test.pypi.org/legacy/" +explicit = true diff --git a/python/README.md b/python/README.md deleted file mode 100644 index 574c231e9..000000000 --- a/python/README.md +++ /dev/null @@ -1,3 +0,0 @@ -- To achieve reproducible builds we use pip-tools to lock packages to a particular version - see - update_python_deps.sh -- write_wheel.py is used to generate our wheels. 
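A note on the `int_version()` hunk in `pylib/anki/utils.py` earlier in this patch: the new `re.sub(r"[^0-9.]", "", version)` call strips beta/rc suffixes before the version string is split. Below is a minimal, self-contained sketch of that parsing path; the helper name `int_version_of` and the final YYMMPP packing are illustrative assumptions (the hunk ends before the function's return statement), not code taken from this patch.

```python
import re


def int_version_of(version: str) -> int:
    """Pack a version string such as '25.07.1' into YYMMPP form (assumed packing)."""
    # Strip non-numeric characters so suffixes like 'b1' or 'rc3' don't break the split,
    # mirroring the regex added in the patch.
    numeric_version = re.sub(r"[^0-9.]", "", version)
    try:
        [year, month, patch] = numeric_version.split(".")
    except ValueError:
        # Two-component versions such as '25.06' get a zero patch level.
        [year, month] = numeric_version.split(".")
        patch = "0"
    # Assumed YYMMPP packing, based on the docstring's '230900' example.
    return int(year) * 10_000 + int(month) * 100 + int(patch)


assert int_version_of("25.07.1") == 250701
assert int_version_of("25.06") == 250600
```

Note that digits inside a suffix survive the stripping (for example, '25.02b1' reduces to '25.021' before the split); that is a property of the regex shown in the hunk itself, not of this sketch.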
diff --git a/python/licenses.json b/python/licenses.json deleted file mode 100644 index 67098b005..000000000 --- a/python/licenses.json +++ /dev/null @@ -1,152 +0,0 @@ -[ - { - "License": "BSD License", - "Name": "Flask", - "Version": "1.1.2" - }, - { - "License": "MIT License", - "Name": "Flask-Cors", - "Version": "3.0.9" - }, - { - "License": "BSD License", - "Name": "Jinja2", - "Version": "2.11.2" - }, - { - "License": "BSD License", - "Name": "Markdown", - "Version": "3.3.3" - }, - { - "License": "BSD License", - "Name": "MarkupSafe", - "Version": "1.1.1" - }, - { - "License": "GPL v3", - "Name": "PyQt5", - "Version": "5.15.1" - }, - { - "License": "SIP", - "Name": "PyQt5-sip", - "Version": "12.8.1" - }, - { - "License": "GPL v3", - "Name": "PyQtWebEngine", - "Version": "5.15.1" - }, - { - "License": "BSD", - "Name": "PySocks", - "Version": "1.7.1" - }, - { - "License": "BSD License", - "Name": "Send2Trash", - "Version": "1.5.0" - }, - { - "License": "BSD License", - "Name": "Werkzeug", - "Version": "1.0.1" - }, - { - "License": "MIT License", - "Name": "attrs", - "Version": "20.3.0" - }, - { - "License": "MIT License", - "Name": "beautifulsoup4", - "Version": "4.9.3" - }, - { - "License": "Mozilla Public License 2.0 (MPL 2.0)", - "Name": "certifi", - "Version": "2020.11.8" - }, - { - "License": "GNU Library or Lesser General Public License (LGPL)", - "Name": "chardet", - "Version": "3.0.4" - }, - { - "License": "BSD License", - "Name": "click", - "Version": "7.1.2" - }, - { - "License": "BSD License", - "Name": "decorator", - "Version": "4.4.2" - }, - { - "License": "BSD License", - "Name": "idna", - "Version": "2.10" - }, - { - "License": "BSD License", - "Name": "itsdangerous", - "Version": "1.1.0" - }, - { - "License": "MIT License", - "Name": "jsonschema", - "Version": "3.2.0" - }, - { - "License": "Apache Software License, MIT License", - "Name": "orjson", - "Version": "3.4.3" - }, - { - "License": "3-Clause BSD License", - "Name": "protobuf", - "Version": "3.13.0" - }, - { - "License": "BSD License", - "Name": "psutil", - "Version": "5.7.3" - }, - { - "License": "MIT License", - "Name": "pyrsistent", - "Version": "0.17.3" - }, - { - "License": "Python Software Foundation License", - "Name": "pywin32", - "Version": "228" - }, - { - "License": "Apache Software License", - "Name": "requests", - "Version": "2.25.0" - }, - { - "License": "MIT License", - "Name": "six", - "Version": "1.15.0" - }, - { - "License": "MIT License", - "Name": "soupsieve", - "Version": "2.0.1" - }, - { - "License": "MIT License", - "Name": "urllib3", - "Version": "1.26.1" - }, - { - "License": "Zope Public License", - "Name": "waitress", - "Version": "1.4.4" - } -] diff --git a/python/licenses.sh b/python/licenses.sh deleted file mode 100755 index a7985a429..000000000 --- a/python/licenses.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# -# Install runtime requirements into a venv and extract their licenses. -# As Windows currently uses extra deps, running this on Windows should -# capture all packages. 
-# Run with 'bash licenses.sh' to update 'license.json' - -set -e - -# setup venv -python -m venv venv - -# build wheels -../bazel.bat --output_base=/c/bazel/anki/base build //pylib/anki:wheel //qt/aqt:wheel - -# install wheels, bound to constrained versions -venv/tools/pip install -c requirements.txt ../bazel-bin/pylib/anki/*.whl ../bazel-bin/qt/aqt/*.whl pip-licenses - -# dump licenses - ptable is a pip-licenses dep -venv/tools/pip-licenses --format=json --ignore-packages anki aqt pip-license PTable > licenses.json - -# clean up -rm -rf venv diff --git a/python/requirements.anki.in b/python/requirements.anki.in deleted file mode 100644 index 45796e6fe..000000000 --- a/python/requirements.anki.in +++ /dev/null @@ -1,9 +0,0 @@ -beautifulsoup4 -decorator -markdown -orjson -protobuf>=4.21 -requests[socks] -distro; sys_platform != "darwin" and sys_platform != "win32" -psutil; sys_platform == "win32" -typing_extensions diff --git a/python/requirements.aqt.in b/python/requirements.aqt.in deleted file mode 100644 index f18e65513..000000000 --- a/python/requirements.aqt.in +++ /dev/null @@ -1,10 +0,0 @@ -beautifulsoup4 -flask -flask_cors -jsonschema -requests -send2trash -waitress>=2.0.0 -psutil; sys.platform == "win32" -pywin32; sys.platform == "win32" -pip-system-certs diff --git a/python/requirements.base.in b/python/requirements.base.in deleted file mode 100644 index 705e50e83..000000000 --- a/python/requirements.base.in +++ /dev/null @@ -1,2 +0,0 @@ -pip-tools -colorama # required on windows diff --git a/python/requirements.base.txt b/python/requirements.base.txt deleted file mode 100644 index 24e3870f9..000000000 --- a/python/requirements.base.txt +++ /dev/null @@ -1,54 +0,0 @@ -build==1.2.1 \ - --hash=sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d \ - --hash=sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4 - # via pip-tools -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via pip-tools -colorama==0.4.6 \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via -r requirements.base.in -importlib-metadata==8.4.0 \ - --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ - --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 - # via build -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via build -pip-tools==7.4.1 \ - --hash=sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9 \ - --hash=sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9 - # via -r requirements.base.in -pyproject-hooks==1.1.0 \ - --hash=sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965 \ - --hash=sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2 - # via - # build - # pip-tools -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # build - # pip-tools -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - 
# via pip-tools -zipp==3.20.1 \ - --hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \ - --hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -pip==24.2 \ - --hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \ - --hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8 - # via pip-tools -setuptools==74.1.1 \ - --hash=sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847 \ - --hash=sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766 - # via pip-tools diff --git a/python/requirements.bundle.in b/python/requirements.bundle.in deleted file mode 100644 index 80835ce6f..000000000 --- a/python/requirements.bundle.in +++ /dev/null @@ -1,8 +0,0 @@ -# currently broken in pyoxidizer -jsonschema<4.2 -setuptools<70 - --r requirements.base.in --r requirements.anki.in --r requirements.aqt.in - diff --git a/python/requirements.bundle.txt b/python/requirements.bundle.txt deleted file mode 100644 index f1af449ac..000000000 --- a/python/requirements.bundle.txt +++ /dev/null @@ -1,494 +0,0 @@ -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via jsonschema -beautifulsoup4==4.12.3 \ - --hash=sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051 \ - --hash=sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed - # via - # -r requirements.anki.in - # -r requirements.aqt.in -blinker==1.8.2 \ - --hash=sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01 \ - --hash=sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83 - # via flask -build==1.2.1 \ - --hash=sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d \ - --hash=sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4 - # via pip-tools -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - 
--hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - 
--hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 - # via requests -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via - # flask - # pip-tools -colorama==0.4.6 \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via -r requirements.base.in -decorator==5.1.1 \ - --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ - --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 - # via -r requirements.anki.in -distro==1.9.0 ; sys_platform != "darwin" and sys_platform != "win32" \ - 
--hash=sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed \ - --hash=sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2 - # via -r requirements.anki.in -flask==3.0.3 \ - --hash=sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3 \ - --hash=sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842 - # via - # -r requirements.aqt.in - # flask-cors -flask-cors==6.0.0 \ - --hash=sha256:4592c1570246bf7beee96b74bc0adbbfcb1b0318f6ba05c412e8909eceec3393 \ - --hash=sha256:6332073356452343a8ccddbfec7befdc3fdd040141fe776ec9b94c262f058657 - # via -r requirements.aqt.in -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 - # via requests -importlib-metadata==8.4.0 \ - --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ - --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 - # via - # build - # flask - # markdown -itsdangerous==2.2.0 \ - --hash=sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef \ - --hash=sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173 - # via flask -jinja2==3.1.5 \ - --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ - --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb - # via flask -jsonschema==4.1.2 \ - --hash=sha256:166870c8ab27bd712a8627e0598de4685bd8d199c4d7bd7cacc3d941ba0c6ca0 \ - --hash=sha256:5c1a282ee6b74235057421fd0f766ac5f2972f77440927f6471c9e8493632fac - # via - # -r requirements.aqt.in - # -r requirements.bundle.in -markdown==3.7 \ - --hash=sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2 \ - --hash=sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803 - # via -r requirements.anki.in -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - 
--hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via - # jinja2 - # werkzeug -orjson==3.10.7 \ - --hash=sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23 \ - 
--hash=sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9 \ - --hash=sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5 \ - --hash=sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad \ - --hash=sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98 \ - --hash=sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412 \ - --hash=sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1 \ - --hash=sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864 \ - --hash=sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6 \ - --hash=sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91 \ - --hash=sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac \ - --hash=sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c \ - --hash=sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1 \ - --hash=sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f \ - --hash=sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250 \ - --hash=sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09 \ - --hash=sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0 \ - --hash=sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225 \ - --hash=sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354 \ - --hash=sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f \ - --hash=sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e \ - --hash=sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469 \ - --hash=sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c \ - --hash=sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12 \ - --hash=sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3 \ - --hash=sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3 \ - --hash=sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149 \ - --hash=sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb \ - --hash=sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2 \ - --hash=sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2 \ - --hash=sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f \ - --hash=sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0 \ - --hash=sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a \ - --hash=sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58 \ - --hash=sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe \ - --hash=sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09 \ - --hash=sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e \ - --hash=sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2 \ - --hash=sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c \ - --hash=sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313 \ - --hash=sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6 \ - --hash=sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93 \ - --hash=sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7 \ - 
--hash=sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866 \ - --hash=sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c \ - --hash=sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b \ - --hash=sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5 \ - --hash=sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175 \ - --hash=sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9 \ - --hash=sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0 \ - --hash=sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff \ - --hash=sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20 \ - --hash=sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5 \ - --hash=sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960 \ - --hash=sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024 \ - --hash=sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd \ - --hash=sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84 - # via -r requirements.anki.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via build -pip-system-certs==4.0 \ - --hash=sha256:47202b9403a6f40783a9674bbc8873f5fc86544ec01a49348fa913e99e2ff68b \ - --hash=sha256:db8e6a31388d9795ec9139957df1a89fa5274fb66164456fd091a5d3e94c350c - # via -r requirements.aqt.in -pip-tools==7.4.1 \ - --hash=sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9 \ - --hash=sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9 - # via -r requirements.base.in -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via -r requirements.anki.in -pyproject-hooks==1.1.0 \ - --hash=sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965 \ - --hash=sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2 - # via - # build - # pip-tools -pyrsistent==0.20.0 \ - --hash=sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f \ - --hash=sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e \ - --hash=sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958 \ - --hash=sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34 \ - --hash=sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca \ - --hash=sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d \ - 
--hash=sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d \ - --hash=sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4 \ - --hash=sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714 \ - --hash=sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf \ - --hash=sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee \ - --hash=sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8 \ - --hash=sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224 \ - --hash=sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d \ - --hash=sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054 \ - --hash=sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656 \ - --hash=sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7 \ - --hash=sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423 \ - --hash=sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce \ - --hash=sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e \ - --hash=sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3 \ - --hash=sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0 \ - --hash=sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f \ - --hash=sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b \ - --hash=sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce \ - --hash=sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a \ - --hash=sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174 \ - --hash=sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86 \ - --hash=sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f \ - --hash=sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b \ - --hash=sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98 \ - --hash=sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022 - # via jsonschema -pysocks==1.7.1 \ - --hash=sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299 \ - --hash=sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 \ - --hash=sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0 - # via requests -requests==2.32.4 \ - --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ - --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 - # via - # -r requirements.anki.in - # -r requirements.aqt.in -send2trash==1.8.3 \ - --hash=sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9 \ - --hash=sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf - # via -r requirements.aqt.in -soupsieve==2.6 \ - --hash=sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb \ - --hash=sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 - # via beautifulsoup4 -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # build - # pip-tools -typing-extensions==4.13.2 \ - --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \ - --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef - # via -r 
requirements.anki.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via requests -waitress==3.0.1 \ - --hash=sha256:26cdbc593093a15119351690752c99adc13cbc6786d75f7b6341d1234a3730ac \ - --hash=sha256:ef0c1f020d9f12a515c4ec65c07920a702613afcad1dbfdc3bcec256b6c072b3 - # via -r requirements.aqt.in -werkzeug==3.0.6 \ - --hash=sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17 \ - --hash=sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d - # via - # flask - # flask-cors -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via pip-tools -wrapt==1.16.0 \ - --hash=sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc \ - --hash=sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81 \ - --hash=sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 \ - --hash=sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e \ - --hash=sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca \ - --hash=sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0 \ - --hash=sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb \ - --hash=sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487 \ - --hash=sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40 \ - --hash=sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c \ - --hash=sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060 \ - --hash=sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202 \ - --hash=sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41 \ - --hash=sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9 \ - --hash=sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b \ - --hash=sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664 \ - --hash=sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d \ - --hash=sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362 \ - --hash=sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00 \ - --hash=sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc \ - --hash=sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1 \ - --hash=sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267 \ - --hash=sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956 \ - --hash=sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966 \ - --hash=sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 \ - --hash=sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228 \ - --hash=sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72 \ - --hash=sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d \ - --hash=sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292 \ - --hash=sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0 \ - --hash=sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0 \ - --hash=sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36 \ - 
--hash=sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c \ - --hash=sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5 \ - --hash=sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f \ - --hash=sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 \ - --hash=sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b \ - --hash=sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2 \ - --hash=sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593 \ - --hash=sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39 \ - --hash=sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 \ - --hash=sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf \ - --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf \ - --hash=sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 \ - --hash=sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c \ - --hash=sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c \ - --hash=sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f \ - --hash=sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440 \ - --hash=sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465 \ - --hash=sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136 \ - --hash=sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b \ - --hash=sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8 \ - --hash=sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3 \ - --hash=sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8 \ - --hash=sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6 \ - --hash=sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e \ - --hash=sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f \ - --hash=sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c \ - --hash=sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e \ - --hash=sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 \ - --hash=sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2 \ - --hash=sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020 \ - --hash=sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35 \ - --hash=sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d \ - --hash=sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3 \ - --hash=sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537 \ - --hash=sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809 \ - --hash=sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d \ - --hash=sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a \ - --hash=sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4 - # via pip-system-certs -zipp==3.20.1 \ - --hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \ - --hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -pip==24.2 \ - 
--hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \ - --hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8 - # via pip-tools -setuptools==69.5.1 \ - --hash=sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987 \ - --hash=sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32 - # via - # -r requirements.bundle.in - # pip-tools diff --git a/python/requirements.dev.in b/python/requirements.dev.in deleted file mode 100644 index c981f127c..000000000 --- a/python/requirements.dev.in +++ /dev/null @@ -1,27 +0,0 @@ --r requirements.base.in --r requirements.anki.in --r requirements.aqt.in - -black -compare-locales -isort -mock -mypy -mypy-protobuf -pip-tools -pylint -pytest -PyChromeDevTools -fluent.syntax -types-decorator -types-flask -types-flask-cors -types-markdown -types-orjson -types-protobuf -types-requests -types-waitress - -# transitive windows dependencies -atomicwrites -colorama diff --git a/python/requirements.dev.txt b/python/requirements.dev.txt deleted file mode 100644 index 9b96a1cd4..000000000 --- a/python/requirements.dev.txt +++ /dev/null @@ -1,715 +0,0 @@ -astroid==3.2.4 \ - --hash=sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a \ - --hash=sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25 - # via pylint -atomicwrites==1.4.1 \ - --hash=sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11 - # via -r requirements.dev.in -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via jsonschema -beautifulsoup4==4.12.3 \ - --hash=sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051 \ - --hash=sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed - # via - # -r requirements.anki.in - # -r requirements.aqt.in -black==24.8.0 \ - --hash=sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6 \ - --hash=sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e \ - --hash=sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f \ - --hash=sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018 \ - --hash=sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e \ - --hash=sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd \ - --hash=sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4 \ - --hash=sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed \ - --hash=sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2 \ - --hash=sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42 \ - --hash=sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af \ - --hash=sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb \ - --hash=sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368 \ - --hash=sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb \ - --hash=sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af \ - --hash=sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed \ - --hash=sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47 \ - --hash=sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2 \ - --hash=sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a \ - 
--hash=sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c \ - --hash=sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920 \ - --hash=sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1 - # via -r requirements.dev.in -blinker==1.8.2 \ - --hash=sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01 \ - --hash=sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83 - # via flask -build==1.2.1 \ - --hash=sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d \ - --hash=sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4 - # via pip-tools -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - 
--hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - 
--hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 - # via requests -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via - # black - # flask - # pip-tools -colorama==0.4.6 \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via - # -r requirements.base.in - # -r requirements.dev.in -compare-locales==9.0.4 \ - --hash=sha256:73d0d384aefa0bc96f5fd8521c08c8bb89b16a37316701323a77960accabd551 \ - --hash=sha256:933d2b6e20f460d3ac2d3176295684505a42085b25e6c31944fcafbaf52f1cc0 - # via -r requirements.dev.in -decorator==5.1.1 \ - --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ - --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 - # via -r requirements.anki.in -dill==0.3.8 \ - --hash=sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca \ - --hash=sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7 - # via pylint -distro==1.9.0 ; sys_platform != "darwin" and sys_platform != "win32" \ - --hash=sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed \ - --hash=sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2 - # via -r requirements.anki.in -exceptiongroup==1.2.2 \ - --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ - --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc - # via pytest -flask==3.0.3 \ - --hash=sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3 \ - --hash=sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842 - # via - # -r requirements.aqt.in - # flask-cors - # types-flask-cors -flask-cors==6.0.0 \ - --hash=sha256:4592c1570246bf7beee96b74bc0adbbfcb1b0318f6ba05c412e8909eceec3393 \ - --hash=sha256:6332073356452343a8ccddbfec7befdc3fdd040141fe776ec9b94c262f058657 - # via -r requirements.aqt.in -fluent-syntax==0.19.0 \ - 
--hash=sha256:920326d7f46864b9758f0044e9968e3112198bc826acee16ddd8f11d359004fd \ - --hash=sha256:b352b3475fac6c6ed5f06527921f432aac073d764445508ee5218aeccc7cc5c4 - # via - # -r requirements.dev.in - # compare-locales -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 - # via requests -importlib-metadata==8.4.0 \ - --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ - --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 - # via - # build - # flask - # markdown -iniconfig==2.0.0 \ - --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ - --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 - # via pytest -isort==5.13.2 \ - --hash=sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109 \ - --hash=sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6 - # via - # -r requirements.dev.in - # pylint -itsdangerous==2.2.0 \ - --hash=sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef \ - --hash=sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173 - # via flask -jinja2==3.1.5 \ - --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ - --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb - # via flask -jsonschema==4.1.2 \ - --hash=sha256:166870c8ab27bd712a8627e0598de4685bd8d199c4d7bd7cacc3d941ba0c6ca0 \ - --hash=sha256:5c1a282ee6b74235057421fd0f766ac5f2972f77440927f6471c9e8493632fac - # via -r requirements.aqt.in -markdown==3.7 \ - --hash=sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2 \ - --hash=sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803 - # via -r requirements.anki.in -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - 
--hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via - # jinja2 - # werkzeug -mccabe==0.7.0 \ - --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325 \ - 
--hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e - # via pylint -mock==5.1.0 \ - --hash=sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744 \ - --hash=sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d - # via -r requirements.dev.in -mypy==1.11.2 \ - --hash=sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36 \ - --hash=sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce \ - --hash=sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6 \ - --hash=sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b \ - --hash=sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca \ - --hash=sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24 \ - --hash=sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383 \ - --hash=sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7 \ - --hash=sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86 \ - --hash=sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d \ - --hash=sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4 \ - --hash=sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8 \ - --hash=sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987 \ - --hash=sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385 \ - --hash=sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79 \ - --hash=sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef \ - --hash=sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6 \ - --hash=sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70 \ - --hash=sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca \ - --hash=sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70 \ - --hash=sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12 \ - --hash=sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104 \ - --hash=sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a \ - --hash=sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318 \ - --hash=sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1 \ - --hash=sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b \ - --hash=sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d - # via -r requirements.dev.in -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via - # black - # mypy -mypy-protobuf==3.6.0 \ - --hash=sha256:02f242eb3409f66889f2b1a3aa58356ec4d909cdd0f93115622e9e70366eca3c \ - --hash=sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c - # via -r requirements.dev.in -orjson==3.10.7 \ - --hash=sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23 \ - --hash=sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9 \ - --hash=sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5 \ - --hash=sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad \ - --hash=sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98 \ - --hash=sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412 \ - 
--hash=sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1 \ - --hash=sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864 \ - --hash=sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6 \ - --hash=sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91 \ - --hash=sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac \ - --hash=sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c \ - --hash=sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1 \ - --hash=sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f \ - --hash=sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250 \ - --hash=sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09 \ - --hash=sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0 \ - --hash=sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225 \ - --hash=sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354 \ - --hash=sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f \ - --hash=sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e \ - --hash=sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469 \ - --hash=sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c \ - --hash=sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12 \ - --hash=sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3 \ - --hash=sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3 \ - --hash=sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149 \ - --hash=sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb \ - --hash=sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2 \ - --hash=sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2 \ - --hash=sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f \ - --hash=sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0 \ - --hash=sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a \ - --hash=sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58 \ - --hash=sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe \ - --hash=sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09 \ - --hash=sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e \ - --hash=sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2 \ - --hash=sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c \ - --hash=sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313 \ - --hash=sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6 \ - --hash=sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93 \ - --hash=sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7 \ - --hash=sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866 \ - --hash=sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c \ - --hash=sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b \ - --hash=sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5 \ - --hash=sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175 \ - 
--hash=sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9 \ - --hash=sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0 \ - --hash=sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff \ - --hash=sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20 \ - --hash=sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5 \ - --hash=sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960 \ - --hash=sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024 \ - --hash=sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd \ - --hash=sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84 - # via -r requirements.anki.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # black - # build - # pytest -pathspec==0.12.1 \ - --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ - --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 - # via black -pip-system-certs==4.0 \ - --hash=sha256:47202b9403a6f40783a9674bbc8873f5fc86544ec01a49348fa913e99e2ff68b \ - --hash=sha256:db8e6a31388d9795ec9139957df1a89fa5274fb66164456fd091a5d3e94c350c - # via -r requirements.aqt.in -pip-tools==7.4.1 \ - --hash=sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9 \ - --hash=sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9 - # via - # -r requirements.base.in - # -r requirements.dev.in -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 - # via - # black - # pylint -pluggy==1.5.0 \ - --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ - --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 - # via pytest -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # -r requirements.anki.in - # mypy-protobuf -pychromedevtools==1.0.3 \ - --hash=sha256:a429968bb18d34322da4ed1b727980d35fbd8104d4e764f6d1850b4ffc6e563b - # via -r requirements.dev.in -pylint==3.2.7 \ - --hash=sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b \ - --hash=sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e - # via -r requirements.dev.in -pyproject-hooks==1.1.0 \ - --hash=sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965 \ - 
--hash=sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2 - # via - # build - # pip-tools -pyrsistent==0.20.0 \ - --hash=sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f \ - --hash=sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e \ - --hash=sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958 \ - --hash=sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34 \ - --hash=sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca \ - --hash=sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d \ - --hash=sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d \ - --hash=sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4 \ - --hash=sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714 \ - --hash=sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf \ - --hash=sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee \ - --hash=sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8 \ - --hash=sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224 \ - --hash=sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d \ - --hash=sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054 \ - --hash=sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656 \ - --hash=sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7 \ - --hash=sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423 \ - --hash=sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce \ - --hash=sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e \ - --hash=sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3 \ - --hash=sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0 \ - --hash=sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f \ - --hash=sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b \ - --hash=sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce \ - --hash=sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a \ - --hash=sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174 \ - --hash=sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86 \ - --hash=sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f \ - --hash=sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b \ - --hash=sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98 \ - --hash=sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022 - # via jsonschema -pysocks==1.7.1 \ - --hash=sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299 \ - --hash=sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 \ - --hash=sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0 - # via requests -pytest==8.3.2 \ - --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ - --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce - # via -r requirements.dev.in -requests==2.32.4 \ - --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ - --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 - # via - # -r requirements.anki.in - # -r requirements.aqt.in - # 
pychromedevtools -send2trash==1.8.3 \ - --hash=sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9 \ - --hash=sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf - # via -r requirements.aqt.in -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via compare-locales -soupsieve==2.6 \ - --hash=sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb \ - --hash=sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 - # via beautifulsoup4 -toml==0.10.2 \ - --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ - --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f - # via compare-locales -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via - # black - # build - # mypy - # pip-tools - # pylint - # pytest -tomlkit==0.13.2 \ - --hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \ - --hash=sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79 - # via pylint -types-click==7.1.8 \ - --hash=sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81 \ - --hash=sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092 - # via types-flask -types-decorator==5.1.8.20240310 \ - --hash=sha256:3af75dc38f5baf65b9b53ea6661ce2056c5ca7d70d620d0b1f620285c1242757 \ - --hash=sha256:52e316b03783886a8a2abdc228f7071680ba65894545cd2085ebe3cf88684a0e - # via -r requirements.dev.in -types-flask==1.1.6 \ - --hash=sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087 \ - --hash=sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf - # via -r requirements.dev.in -types-flask-cors==5.0.0.20240902 \ - --hash=sha256:595e5f36056cd128ab905832e055f2e5d116fbdc685356eea4490bc77df82137 \ - --hash=sha256:8921b273bf7cd9636df136b66408efcfa6338a935e5c8f53f5eff1cee03f3394 - # via -r requirements.dev.in -types-jinja2==2.11.9 \ - --hash=sha256:60a1e21e8296979db32f9374d8a239af4cb541ff66447bb915d8ad398f9c63b2 \ - --hash=sha256:dbdc74a40aba7aed520b7e4d89e8f0fe4286518494208b35123bcf084d4b8c81 - # via types-flask -types-markdown==3.7.0.20240822 \ - --hash=sha256:183557c9f4f865bdefd8f5f96a38145c31819271cde111d35557c3bd2069e78d \ - --hash=sha256:bec91c410aaf2470ffdb103e38438fbcc53689b00133f19e64869eb138432ad7 - # via -r requirements.dev.in -types-markupsafe==1.1.10 \ - --hash=sha256:85b3a872683d02aea3a5ac2a8ef590193c344092032f58457287fbf8e06711b1 \ - --hash=sha256:ca2bee0f4faafc45250602567ef38d533e877d2ddca13003b319c551ff5b3cc5 - # via types-jinja2 -types-orjson==3.6.2 \ - --hash=sha256:22ee9a79236b6b0bfb35a0684eded62ad930a88a56797fa3c449b026cf7dbfe4 \ - --hash=sha256:cf9afcc79a86325c7aff251790338109ed6f6b1bab09d2d4262dd18c85a3c638 - # via -r requirements.dev.in -types-protobuf==5.27.0.20240626 \ - --hash=sha256:683ba14043bade6785e3f937a7498f243b37881a91ac8d81b9202ecf8b191e9c \ - --hash=sha256:688e8f7e8d9295db26bc560df01fb731b27a25b77cbe4c1ce945647f7024f5c1 - # via - # -r requirements.dev.in - # mypy-protobuf -types-pywin32==306.0.0.20240822 \ - --hash=sha256:31a16f7eaf711166e8aec50ee1ddf0f16b4512e19ecc92a019ae7a0860b64bad \ - --hash=sha256:34d22b58aaa2cc86fe585b6e2e1eda88a60b010badea0e0e4a410ebe28744645 - # via -r requirements.dev.in 
-types-requests==2.32.0.20240712 \ - --hash=sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358 \ - --hash=sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3 - # via -r requirements.dev.in -types-waitress==3.0.0.20240423 \ - --hash=sha256:7e9f77a3bc3c20436b9b7ef93da88c8fe0d1e2205d5891ae7526cbd93554f5a4 \ - --hash=sha256:ec3af592b5868ccf151645afc74d2e606cd5dec3ed326c9fd0259691b39430fe - # via -r requirements.dev.in -types-werkzeug==1.0.9 \ - --hash=sha256:194bd5715a13c598f05c63e8a739328657590943bce941e8a3619a6b5d4a54ec \ - --hash=sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c - # via types-flask -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.anki.in - # astroid - # black - # fluent-syntax - # mypy - # pylint -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via - # requests - # types-requests -waitress==3.0.1 \ - --hash=sha256:26cdbc593093a15119351690752c99adc13cbc6786d75f7b6341d1234a3730ac \ - --hash=sha256:ef0c1f020d9f12a515c4ec65c07920a702613afcad1dbfdc3bcec256b6c072b3 - # via -r requirements.aqt.in -websocket-client==1.8.0 \ - --hash=sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 \ - --hash=sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da - # via pychromedevtools -werkzeug==3.0.6 \ - --hash=sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17 \ - --hash=sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d - # via - # flask - # flask-cors -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via pip-tools -wrapt==1.16.0 \ - --hash=sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc \ - --hash=sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81 \ - --hash=sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 \ - --hash=sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e \ - --hash=sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca \ - --hash=sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0 \ - --hash=sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb \ - --hash=sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487 \ - --hash=sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40 \ - --hash=sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c \ - --hash=sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060 \ - --hash=sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202 \ - --hash=sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41 \ - --hash=sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9 \ - --hash=sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b \ - --hash=sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664 \ - --hash=sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d \ - --hash=sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362 \ - 
--hash=sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00 \ - --hash=sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc \ - --hash=sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1 \ - --hash=sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267 \ - --hash=sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956 \ - --hash=sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966 \ - --hash=sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 \ - --hash=sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228 \ - --hash=sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72 \ - --hash=sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d \ - --hash=sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292 \ - --hash=sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0 \ - --hash=sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0 \ - --hash=sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36 \ - --hash=sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c \ - --hash=sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5 \ - --hash=sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f \ - --hash=sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 \ - --hash=sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b \ - --hash=sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2 \ - --hash=sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593 \ - --hash=sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39 \ - --hash=sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 \ - --hash=sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf \ - --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf \ - --hash=sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 \ - --hash=sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c \ - --hash=sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c \ - --hash=sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f \ - --hash=sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440 \ - --hash=sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465 \ - --hash=sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136 \ - --hash=sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b \ - --hash=sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8 \ - --hash=sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3 \ - --hash=sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8 \ - --hash=sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6 \ - --hash=sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e \ - --hash=sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f \ - --hash=sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c \ - --hash=sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e \ - --hash=sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 \ - 
--hash=sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2 \ - --hash=sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020 \ - --hash=sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35 \ - --hash=sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d \ - --hash=sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3 \ - --hash=sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537 \ - --hash=sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809 \ - --hash=sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d \ - --hash=sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a \ - --hash=sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4 - # via pip-system-certs -zipp==3.20.1 \ - --hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \ - --hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -pip==24.2 \ - --hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \ - --hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8 - # via pip-tools -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via pip-tools diff --git a/python/requirements.qt5_14.in b/python/requirements.qt5_14.in deleted file mode 100644 index 0d2b2be9d..000000000 --- a/python/requirements.qt5_14.in +++ /dev/null @@ -1,3 +0,0 @@ -pyqt5==5.14.1 -pyqtwebengine==5.14.0 -pyqt5_sip==12.8.1 diff --git a/python/requirements.qt5_14.txt b/python/requirements.qt5_14.txt deleted file mode 100644 index 6e609f027..000000000 --- a/python/requirements.qt5_14.txt +++ /dev/null @@ -1,42 +0,0 @@ -pyqt5==5.14.1 \ - --hash=sha256:2d94ec761fb656707050c68b41958e3a9f755bb1df96c064470f4096d2899e32 \ - --hash=sha256:2f230f2dbd767099de7a0cb915abdf0cbc3256a0b5bb910eb09b99117db7a65b \ - --hash=sha256:31b142a868152d60c6323e0527edb692fdf05fd7cb4fe2fe9ce07d1ce560221a \ - --hash=sha256:713b9a201f5e7b2fca8691373e5d5c8c2552a51d87ca9ffbb1461e34e3241211 \ - --hash=sha256:a0bfe9fd718bca4de3e33000347e048f73126b6dc46530eb020b0251a638ee9d - # via - # -r requirements.in - # pyqtwebengine -pyqt5-sip==12.8.1 \ - --hash=sha256:0304ca9114b9817a270f67f421355075b78ff9fc25ac58ffd72c2601109d2194 \ - --hash=sha256:0cd969be528c27bbd4755bd323dff4a79a8fdda28215364e6ce3e069cb56c2a9 \ - --hash=sha256:2f35e82fd7ec1e1f6716e9154721c7594956a4f5bd4f826d8c6a6453833cc2f0 \ - --hash=sha256:30e944db9abee9cc757aea16906d4198129558533eb7fadbe48c5da2bd18e0bd \ - --hash=sha256:34dcd29be47553d5f016ff86e89e24cbc5eebae92eb2f96fb32d2d7ba028c43c \ - --hash=sha256:5a011aeff89660622a6d5c3388d55a9d76932f3b82c95e82fc31abd8b1d2990d \ - --hash=sha256:6c1ebee60f1d2b3c70aff866b7933d8d8d7646011f7c32f9321ee88c290aa4f9 \ - --hash=sha256:7b81382ce188d63890a0e35abe0f9bb946cabc873a31873b73583b0fc84ac115 \ - --hash=sha256:832fd60a264de4134c2824d393320838f3ab648180c9c357ec58a74524d24507 \ - --hash=sha256:84ba7746762bd223bed22428e8561aa267a229c28344c2d28c5d5d3f8970cffb \ - --hash=sha256:9312ec47cac4e33c11503bc1cbeeb0bdae619620472f38e2078c5a51020a930f \ - --hash=sha256:a1b8ef013086e224b8e86c93f880f776d01b59195bdfa2a8e0b23f0480678fec \ - 
--hash=sha256:a29e2ac399429d3b7738f73e9081e50783e61ac5d29344e0802d0dcd6056c5a2 \ - --hash=sha256:b6d42250baec52a5f77de64e2951d001c5501c3a2df2179f625b241cbaec3369 \ - --hash=sha256:bb5a87b66fc1445915104ee97f7a20a69decb42f52803e3b0795fa17ff88226c \ - --hash=sha256:c317ab1263e6417c498b81f5c970a9b1af7acefab1f80b4cc0f2f8e661f29fc5 \ - --hash=sha256:c9800729badcb247765e4ffe2241549d02da1fa435b9db224845bc37c3e99cb0 \ - --hash=sha256:c9d6d448c29dc6606bb7974696608f81f4316c8234f7c7216396ed110075e777 \ - --hash=sha256:da9c9f1e65b9d09e73bd75befc82961b6b61b5a3b9d0a7c832168e1415f163c6 \ - --hash=sha256:ed897c58acf4a3cdca61469daa31fe6e44c33c6c06a37c3f21fab31780b3b86a \ - --hash=sha256:f168f0a7f32b81bfeffdf003c36f25d81c97dee5eb67072a5183e761fe250f13 - # via - # -r requirements.in - # pyqt5 - # pyqtwebengine -pyqtwebengine==5.14.0 \ - --hash=sha256:01cd7f38ba4efa5f4c0983219ab15dad7747a0ca9378c7832a3077a53988f5ea \ - --hash=sha256:37c4a820c5bcc82a6cb43ad33b8c81eee4c4772fc03e180a8fa37a59f99f6a48 \ - --hash=sha256:3d0cba04f64d4f66087cc92e254ff8b33ec4a4e6c7751417fe2bd53c3ed740a7 \ - --hash=sha256:85e1fac1b2c9bebf0b2e8cd9a75c14a38aad75165a8d8bcb8f6318944b779b25 \ - --hash=sha256:e11595051f8bfbfa49175d899b2c8c2eea3a3deac4141edf4db68c3555221c92 - # via -r requirements.in diff --git a/python/requirements.qt5_15.in b/python/requirements.qt5_15.in deleted file mode 100644 index 66642166f..000000000 --- a/python/requirements.qt5_15.in +++ /dev/null @@ -1,3 +0,0 @@ -pyqt5==5.15.5 -pyqtwebengine==5.15.5 -pyqt5_sip==12.9.0 diff --git a/python/requirements.qt5_15.txt b/python/requirements.qt5_15.txt deleted file mode 100644 index 8ac1e2225..000000000 --- a/python/requirements.qt5_15.txt +++ /dev/null @@ -1,54 +0,0 @@ -pyqt5==5.15.5 \ - --hash=sha256:521130eea1eaac55cc6867b1dc627d292b6468fb8e525ce2a015cdf39028d6e8 \ - --hash=sha256:5966fb291f316f8e35bc8775dda63acf1bb9855baeb5af3e33d3e7c4f1cd98d4 \ - --hash=sha256:85e76b7a96995b9da12083850bf2a9f4f0aeba2b0b99461b3337ad7e44f428c3 \ - --hash=sha256:b411b7a8fa03901c9feb1dcbac7ea1fc3ce20b9ae682762b777cd5398749ca2b \ - --hash=sha256:b8e23c1a3fe1b7749c9106f36fba0bd4676dc77bcacca95304c6b840b782e24d - # via - # -r requirements.in - # pyqtwebengine -pyqt5-qt5==5.15.2 \ - --hash=sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a \ - --hash=sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962 \ - --hash=sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154 \ - --hash=sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327 - # via pyqt5 -pyqt5-sip==12.9.0 \ - --hash=sha256:055581c6fed44ba4302b70eeb82e979ff70400037358908f251cd85cbb3dbd93 \ - --hash=sha256:0fc9aefacf502696710b36cdc9fa2a61487f55ee883dbcf2c2a6477e261546f7 \ - --hash=sha256:42274a501ab4806d2c31659170db14c282b8313d2255458064666d9e70d96206 \ - --hash=sha256:4347bd81d30c8e3181e553b3734f91658cfbdd8f1a19f254777f906870974e6d \ - --hash=sha256:485972daff2fb0311013f471998f8ec8262ea381bded244f9d14edaad5f54271 \ - --hash=sha256:4f8e05fe01d54275877c59018d8e82dcdd0bc5696053a8b830eecea3ce806121 \ - --hash=sha256:69a3ad4259172e2b1aa9060de211efac39ddd734a517b1924d9c6c0cc4f55f96 \ - --hash=sha256:6a8701892a01a5a2a4720872361197cc80fdd5f49c8482d488ddf38c9c84f055 \ - --hash=sha256:6d5bca2fc222d58e8093ee8a81a6e3437067bb22bc3f86d06ec8be721e15e90a \ - --hash=sha256:83c3220b1ca36eb8623ba2eb3766637b19eb0ce9f42336ad8253656d32750c0a \ - --hash=sha256:a25b9843c7da6a1608f310879c38e6434331aab1dc2fe6cb65c14f1ecf33780e \ - 
--hash=sha256:ac57d796c78117eb39edd1d1d1aea90354651efac9d3590aac67fa4983f99f1f \ - --hash=sha256:b09f4cd36a4831229fb77c424d89635fa937d97765ec90685e2f257e56a2685a \ - --hash=sha256:c446971c360a0a1030282a69375a08c78e8a61d568bfd6dab3dcc5cf8817f644 \ - --hash=sha256:c5216403d4d8d857ec4a61f631d3945e44fa248aa2415e9ee9369ab7c8a4d0c7 \ - --hash=sha256:d3e4489d7c2b0ece9d203ae66e573939f7f60d4d29e089c9f11daa17cfeaae32 \ - --hash=sha256:d59af63120d1475b2bf94fe8062610720a9be1e8940ea146c7f42bb449d49067 \ - --hash=sha256:d85002238b5180bce4b245c13d6face848faa1a7a9e5c6e292025004f2fd619a \ - --hash=sha256:d8b2bdff7bbf45bc975c113a03b14fd669dc0c73e1327f02706666a7dd51a197 \ - --hash=sha256:dd05c768c2b55ffe56a9d49ce6cc77cdf3d53dbfad935258a9e347cbfd9a5850 \ - --hash=sha256:fc43f2d7c438517ee33e929e8ae77132749c15909afab6aeece5fcf4147ffdb5 - # via - # -r requirements.in - # pyqt5 - # pyqtwebengine -pyqtwebengine==5.15.5 \ - --hash=sha256:30cebf455406990d5a0b859eac261ba6b45c32ce18956271733e0e96dbdca9b7 \ - --hash=sha256:5c77f71d88d871bc7400c68ef6433fadc5bd57b86d1a9c4d8094cea42f3607f1 \ - --hash=sha256:782aeee6bc8699bc029fe5c169a045c2bc9533d781cf3f5e9fb424b85a204e68 \ - --hash=sha256:ab47608dccf2b5e4b950d5a3cc704b17711af035024d07a9b71ad29fc103b941 \ - --hash=sha256:b827ad7ba0a65d5cd176797478f0ec8f599df6746b06c548649ff5674482a022 - # via -r requirements.in -pyqtwebengine-qt5==5.15.2 \ - --hash=sha256:24231f19e1595018779977de6722b5c69f3d03f34a5f7574ff21cd1e764ef76d \ - --hash=sha256:9e80b408d8de09d4e708d5d84c3ceaf3603292ff8f5e566ae44bb0320fa59c33 \ - --hash=sha256:bc7b1fd1f4f8138d59b0b0245d601fb2c5c0aa1e1e7e853b713e52a3165d147e \ - --hash=sha256:ec2acb1780c0124ef060c310e00ca701f388d8b6c35bba9127f7a6f0dc536f77 - # via pyqtwebengine diff --git a/python/requirements.qt6_6.in b/python/requirements.qt6_6.in deleted file mode 100644 index af94affd8..000000000 --- a/python/requirements.qt6_6.in +++ /dev/null @@ -1,5 +0,0 @@ -pyqt6==6.6.1 -pyqt6-qt6==6.6.2 -pyqt6-webengine==6.6.0 -pyqt6-webengine-qt6==6.6.2 -pyqt6_sip==13.6.0 diff --git a/python/requirements.qt6_6.txt b/python/requirements.qt6_6.txt deleted file mode 100644 index 5b1a3ba9b..000000000 --- a/python/requirements.qt6_6.txt +++ /dev/null @@ -1,56 +0,0 @@ -pyqt6==6.6.1 \ - --hash=sha256:03a656d5dc5ac31b6a9ad200f7f4f7ef49fa00ad7ce7a991b9bb691617141d12 \ - --hash=sha256:5aa0e833cb5a79b93813f8181d9f145517dd5a46f4374544bcd1e93a8beec537 \ - --hash=sha256:6b43878d0bbbcf8b7de165d305ec0cb87113c8930c92de748a11c473a6db5085 \ - --hash=sha256:9f158aa29d205142c56f0f35d07784b8df0be28378d20a97bcda8bd64ffd0379 - # via - # -r requirements.qt6_6.in - # pyqt6-webengine -pyqt6-qt6==6.6.2 \ - --hash=sha256:5a41fe9d53b9e29e9ec5c23f3c5949dba160f90ca313ee8b96b8ffe6a5059387 \ - --hash=sha256:7ef446d3ffc678a8586ff6dc9f0d27caf4dff05dea02c353540d2f614386faf9 \ - --hash=sha256:8d7f674a4ec43ca00191e14945ca4129acbe37a2172ed9d08214ad58b170bc11 \ - --hash=sha256:b8363d88623342a72ac17da9127dc12f259bb3148796ea029762aa2d499778d9 - # via - # -r requirements.qt6_6.in - # pyqt6 -pyqt6-sip==13.6.0 \ - --hash=sha256:0dfd22cfedd87e96f9d51e0778ca2ba3dc0be83e424e9e0f98f6994d8d9c90f0 \ - --hash=sha256:13885361ca2cb2f5085d50359ba61b3fabd41b139fb58f37332acbe631ef2357 \ - --hash=sha256:24441032a29791e82beb7dfd76878339058def0e97fdb7c1cea517f3a0e6e96b \ - --hash=sha256:2486e1588071943d4f6657ba09096dc9fffd2322ad2c30041e78ea3f037b5778 \ - --hash=sha256:3075d8b325382750829e6cde6971c943352309d35768a4d4da0587459606d562 \ - --hash=sha256:33ea771fe777eb0d1a2c3ef35bcc3f7a286eb3ff09cd5b2fdd3d87d1f392d7e8 \ - 
--hash=sha256:39854dba35f8e5a4288da26ecb5f40b4c5ec1932efffb3f49d5ea435a7f37fb3 \ - --hash=sha256:3bf03e130fbfd75c9c06e687b86ba375410c7a9e835e4e03285889e61dd4b0c4 \ - --hash=sha256:43fb8551796030aae3d66d6e35e277494071ec6172cd182c9569ab7db268a2f5 \ - --hash=sha256:58f68a48400e0b3d1ccb18090090299bad26e3aed7ccb7057c65887b79b8aeea \ - --hash=sha256:5b9c6b6f9cfccb48cbb78a59603145a698fb4ffd176764d7083e5bf47631d8df \ - --hash=sha256:747f6ca44af81777a2c696bd501bc4815a53ec6fc94d4e25830e10bc1391f8ab \ - --hash=sha256:86a7b67c64436e32bffa9c28c9f21bf14a9faa54991520b12c3f6f435f24df7f \ - --hash=sha256:8c282062125eea5baf830c6998587d98c50be7c3a817a057fb95fef647184012 \ - --hash=sha256:8f9df9f7ccd8a9f0f1d36948c686f03ce1a1281543a3e636b7b7d5e086e1a436 \ - --hash=sha256:98bf954103b087162fa63b3a78f30b0b63da22fd6450b610ec1b851dbb798228 \ - --hash=sha256:9adf672f9114687533a74d5c2d4c03a9a929ad5ad9c3e88098a7da1a440ab916 \ - --hash=sha256:a6ce80bc24618d8a41be8ca51ad9f10e8bc4296dd90ab2809573df30a23ae0e5 \ - --hash=sha256:d6b5f699aaed0ac1fcd23e8fbca70d8a77965831b7c1ce474b81b1678817a49d \ - --hash=sha256:fa759b6339ff7e25f9afe2a6b651b775f0a36bcb3f5fa85e81a90d3b033c83f4 \ - --hash=sha256:fa7b10af7488efc5e53b41dd42c0f421bde6c2865a107af7ae259aff9d841da9 - # via - # -r requirements.qt6_6.in - # pyqt6 - # pyqt6-webengine -pyqt6-webengine==6.6.0 \ - --hash=sha256:9d542738ed6e11c1978ce59035c07627def7c63eef0f59581d327f01209141bc \ - --hash=sha256:cb7793f06525ca054fcc6039afd93e23b82228b880d0b1301ce635f7f3ed2edf \ - --hash=sha256:d50b984c3f85e409e692b156132721522d4e8cf9b6c25e0cf927eea2dfb39487 \ - --hash=sha256:fded35fba636c4916fec84aa7c6840ad2e75d211462feb3e966f9545a59d56e6 - # via -r requirements.qt6_6.in -pyqt6-webengine-qt6==6.6.2 \ - --hash=sha256:27b1b6a6f4ea115b3dd300d2df906d542009d9eb0e62b05e6b7cb85dfe68e9c3 \ - --hash=sha256:3da4db9ddd984b647d0b79fa10fc6cf65364dfe283cd702b12cb7164be2307cd \ - --hash=sha256:5d6f3ae521115cee77fea22b0248e7b219995390b951b51e4d519aef9c304ca8 \ - --hash=sha256:f2364dfa3a6e751ead71b7ba759081be677fcf1c6bbd8a2a2a250eb5f06432e8 - # via - # -r requirements.qt6_6.in - # pyqt6-webengine diff --git a/python/requirements.qt6_8.in b/python/requirements.qt6_8.in deleted file mode 100644 index e16ae92e8..000000000 --- a/python/requirements.qt6_8.in +++ /dev/null @@ -1,5 +0,0 @@ -pyqt6==6.8.0 -pyqt6-qt6==6.8.1 -pyqt6-webengine==6.8.0 -pyqt6-webengine-qt6==6.8.1 -pyqt6_sip==13.9.1 diff --git a/python/requirements.qt6_8.txt b/python/requirements.qt6_8.txt deleted file mode 100644 index 21b567ff7..000000000 --- a/python/requirements.qt6_8.txt +++ /dev/null @@ -1,71 +0,0 @@ -pyqt6==6.8.0 \ - --hash=sha256:3a4354816f11e812b727206a9ea6e79ff3774f1bb7228ad4b9318442d2c64ff9 \ - --hash=sha256:452bae5840077bf0f146c798d7777f70d7bdd0c7dcfa9ee7a415c1daf2d10038 \ - --hash=sha256:48bace7b87676bba5e6114482f3a20ca20be90c7f261b5d340464313f5f2bf5e \ - --hash=sha256:6d8628de4c2a050f0b74462e4c9cb97f839bf6ffabbca91711722ffb281570d9 \ - --hash=sha256:8c5c05f5fdff31a5887dbc29b27615b09df467631238d7b449283809ffca6228 \ - --hash=sha256:a9913d479f1ffee804bf7f232079baea4fb4b221a8f4890117588917a54ea30d \ - --hash=sha256:cf7123caea14e7ecf10bd12cae48e8d9970ef7caf627bc7d7988b0baa209adb3 - # via - # -r requirements.qt6_8.in - # pyqt6-webengine -pyqt6-qt6==6.8.1 \ - --hash=sha256:006d786693d0511fbcf184a862edbd339c6ed1bb3bd9de363d73a19ed4b23dff \ - --hash=sha256:08065d595f1e6fc2dde9f4450eeff89082f4bad26f600a8e9b9cc5966716bfcf \ - --hash=sha256:1eb8460a1fdb38d0b2458c2974c01d471c1e59e4eb19ea63fc447aaba3ad530e \ - 
--hash=sha256:20843cb86bd94942d1cd99e39bf1aeabb875b241a35a8ab273e4bbbfa63776db \ - --hash=sha256:2f4b8b55b1414b93f340f22e8c88d25550efcdebc4b65a3927dd947b73bd4358 \ - --hash=sha256:98aa99fe38ae68c5318284cd28f3479ba538c40bf6ece293980abae0925c1b24 \ - --hash=sha256:9f3790c4ce4dc576e48b8718d55fb8743057e6cbd53a6ca1dd253ffbac9b7287 \ - --hash=sha256:a8bc2ed4ee5e7c6ff4dd1c7db0b27705d151fee5dc232bbd1bf17618f937f515 \ - --hash=sha256:d6ca5d2b9d2ec0ee4a814b2175f641a5c4299cb80b45e0f5f8356632663f89b3 - # via - # -r requirements.qt6_8.in - # pyqt6 -pyqt6-sip==13.9.1 \ - --hash=sha256:14f95c6352e3b85dc26bf59cfbf77a470ecbd5fcdcf00af4b648f0e1b9eefb9e \ - --hash=sha256:15be741d1ae8c82bb7afe9a61f3cf8c50457f7d61229a1c39c24cd6e8f4d86dc \ - --hash=sha256:1d322ded1d1fea339cc6ac65b768e72c69c486eebb7db6ccde061b5786d74cc5 \ - --hash=sha256:1ec52e962f54137a19208b6e95b6bd9f7a403eb25d7237768a99306cd9db26d1 \ - --hash=sha256:1fb405615970e85b622b13b4cad140ff1e4182eb8334a0b27a4698e6217b89b0 \ - --hash=sha256:22d66256b800f552ade51a463510bf905f3cb318aae00ff4288fae4de5d0e600 \ - --hash=sha256:2ab85aaf155828331399c59ebdd4d3b0358e42c08250e86b43d56d9873df148a \ - --hash=sha256:3c269052c770c09b61fce2f2f9ea934a67dfc65f443d59629b4ccc8f89751890 \ - --hash=sha256:5004514b08b045ad76425cf3618187091a668d972b017677b1b4b193379ef553 \ - --hash=sha256:552ff8fdc41f5769d3eccc661f022ed496f55f6e0a214c20aaf56e56385d61b6 \ - --hash=sha256:5643c92424fe62cb0b33378fef3d28c1525f91ada79e8a15bd9a05414a09503d \ - --hash=sha256:56ce0afb19cd8a8c63ff93ae506dffb74f844b88adaa6673ebc0dec43af48a76 \ - --hash=sha256:57b5312ef13c1766bdf69b317041140b184eb24a51e1e23ce8fc5386ba8dffb2 \ - --hash=sha256:5d7726556d1ca7a7ed78e19ba53285b64a2a8f6ad7ff4cb18a1832efca1a3102 \ - --hash=sha256:69a879cfc94f4984d180321b76f52923861cd5bf4969aa885eef7591ee932517 \ - --hash=sha256:6e6c1e2592187934f4e790c0c099d0033e986dcef7bdd3c06e3895ffa995e9fc \ - --hash=sha256:8b2ac36d6e04db6099614b9c1178a2f87788c7ffc3826571fb63d36ddb4c401d \ - --hash=sha256:8c207528992d59b0801458aa6fcff118e5c099608ef0fc6ff8bccbdc23f29c04 \ - --hash=sha256:976c7758f668806d4df7a8853f390ac123d5d1f73591ed368bdb8963574ff589 \ - --hash=sha256:accab6974b2758296400120fdcc9d1f37785b2ea2591f00656e1776f058ded6c \ - --hash=sha256:c1942e107b0243ced9e510d507e0f27aeea9d6b13e0a1b7c06fd52a62e0d41f7 \ - --hash=sha256:c800db3464481e87b1d2b84523b075df1e8fc7856c6f9623dc243f89be1cb604 \ - --hash=sha256:e996d320744ca8342cad6f9454345330d4f06bce129812d032bda3bad6967c5c \ - --hash=sha256:fa27b51ae4c7013b3700cf0ecf46907d1333ae396fc6511311920485cbce094b - # via - # -r requirements.qt6_8.in - # pyqt6 - # pyqt6-webengine -pyqt6-webengine==6.8.0 \ - --hash=sha256:5b5090dcc71dd36172ca8370db7dcaadfa0a022a8e58f6e172301289036c666b \ - --hash=sha256:5b9231b58014965b72504e49f39a6dbc3ecd05d4d725af011d75e6c8a7e2d5f7 \ - --hash=sha256:64045ea622b6a41882c2b18f55ae9714b8660acff06a54e910eb72822c2f3ff2 \ - --hash=sha256:c549f0f72c285eeea94000f6764dfaebf6bb3b13224580c7169a409bf1bf1bb7 \ - --hash=sha256:c7a5731923112acf23fbf93efad91f7b1545221063572106273e34c15a029fe7 \ - --hash=sha256:d7366809d681bcc096fa565f2a81d0ab040f7da5bb4f12f78e834a2b173c87d1 - # via -r requirements.qt6_8.in -pyqt6-webengine-qt6==6.8.1 \ - --hash=sha256:0405b6ce35f406affb27547c6c3608dc82405568af71505fefae4081c8b4ac39 \ - --hash=sha256:0ced2a10433da2571cfa29ed882698e0e164184d54068d17ba73799c45af5f0f \ - --hash=sha256:79f67a459ecb452f865e04f19122a1d6f30c83d9a1ffd06e7e6f0d652204083a \ - --hash=sha256:8059118591641cc9da6616343d893c77fbd065bef3e0764679543345e2c75123 \ - 
--hash=sha256:a375dbb34e03707b0ab4830b61e4d77a31dc3ef880421c8936472f2af34a3f80 \ - --hash=sha256:e36574aa55b30633a12aa000835f01e488a0f0c13513fd9a0d50c2281e0a9068 - # via - # -r requirements.qt6_8.in - # pyqt6-webengine diff --git a/python/requirements.win.in b/python/requirements.win.in deleted file mode 100644 index 24fd9cb64..000000000 --- a/python/requirements.win.in +++ /dev/null @@ -1,2 +0,0 @@ -pywin32 - diff --git a/python/requirements.win.txt b/python/requirements.win.txt deleted file mode 100644 index 65c2b6e3e..000000000 --- a/python/requirements.win.txt +++ /dev/null @@ -1,16 +0,0 @@ -pywin32==305 \ - --hash=sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d \ - --hash=sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1 \ - --hash=sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2 \ - --hash=sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990 \ - --hash=sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116 \ - --hash=sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863 \ - --hash=sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db \ - --hash=sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271 \ - --hash=sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7 \ - --hash=sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478 \ - --hash=sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4 \ - --hash=sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918 \ - --hash=sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504 \ - --hash=sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496 - # via -r requirements.win.in diff --git a/python/sphinx/build.py b/python/sphinx/build.py index 7d979c510..61091e6e1 100644 --- a/python/sphinx/build.py +++ b/python/sphinx/build.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import os import subprocess + os.environ["REPO_ROOT"] = os.path.abspath(".") subprocess.run(["out/pyenv/bin/sphinx-apidoc", "-o", "out/python/sphinx", "pylib", "qt"], check=True) subprocess.run(["out/pyenv/bin/sphinx-build", "out/python/sphinx", "out/python/sphinx/html"], check=True) diff --git a/python/update_python_deps.sh b/python/update_python_deps.sh deleted file mode 100755 index 9040e3456..000000000 --- a/python/update_python_deps.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -set -e - -if [ "$1" == "all" ]; then - upgrade="--upgrade" -elif [ "$1" != "" ]; then - upgrade="--upgrade-package $1" -else - upgrade="" -fi - -args="--resolver=backtracking --allow-unsafe --no-header --strip-extras --generate-hashes" - -# initial pyenv bootstrap -../out/pyenv/bin/pip-compile $args $upgrade requirements.base.in - -# during build/development/testing -../out/pyenv/bin/pip-compile $args $upgrade requirements.dev.in - -# during bundle -../out/pyenv/bin/pip-compile $args $upgrade requirements.bundle.in -for i in requirements.{bundle,qt6*}.in; do ../out/pyenv/bin/pip-compile $args $upgrade $i; done - - diff --git a/python/update_win_deps.bat b/python/update_win_deps.bat deleted file mode 100644 index 2cee31f3f..000000000 --- a/python/update_win_deps.bat +++ /dev/null @@ -1 +0,0 @@ -..\out\pyenv\scripts\pip-compile --resolver=backtracking --allow-unsafe --no-header --strip-extras --generate-hashes requirements.win.in diff --git a/python/version.py b/python/version.py new file 
mode 100644 index 000000000..5cf3c4f12 --- /dev/null +++ b/python/version.py @@ -0,0 +1,10 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +"""Version helper for wheel builds.""" + +import pathlib + +# Read version from .version file in project root +_version_file = pathlib.Path(__file__).parent.parent / ".version" +__version__ = _version_file.read_text().strip() diff --git a/python/write_wheel.py b/python/write_wheel.py deleted file mode 100644 index 09899e292..000000000 --- a/python/write_wheel.py +++ /dev/null @@ -1,191 +0,0 @@ -# Based on https://github.com/ziglang/zig-pypi/blob/de14cf728fa35c014821f62a4fa9abd9f4bb560e/make_wheels.py -# MIT - -from __future__ import annotations - -import os -import sys -from email.message import EmailMessage -from pathlib import Path -from typing import Sequence -from zipfile import ZIP_DEFLATED, ZipInfo - -from wheel.wheelfile import WheelFile - -def make_message(headers, payload=None): - msg = EmailMessage() - for name, value in headers.items(): - if name == "_dependencies": - for dep in value: - if isinstance(dep, ExtraRequires): - msg["Provides-Extra"] = dep.name - for inner_dep in dep.deps: - msg["Requires-Dist"] = f"{inner_dep}; extra == '{dep.name}'" - else: - msg["Requires-Dist"] = dep - elif isinstance(value, list): - for value_part in value: - msg[name] = value_part - else: - msg[name] = value - if payload: - msg.set_payload(payload) - # EmailMessage wraps the license line, which results in an invalid file - out = bytes(msg) - out = out.replace(b"License v3 or\n later", b"License v3 or later") - return out - - -def write_wheel_file(filename, contents): - with WheelFile(filename, "w") as wheel: - for member_info, member_source in contents.items(): - if not isinstance(member_info, ZipInfo): - member_info = ZipInfo(member_info) - member_info.external_attr = 0o644 << 16 - member_info.file_size = len(member_source) - member_info.compress_type = ZIP_DEFLATED - wheel.writestr(member_info, bytes(member_source)) - return filename - - -def write_wheel( - wheel_path, - *, - name, - version, - tag, - metadata, - description, - contents, - entrypoints: list[str] | None = None, - top_level: list[str] | None = None, -): - dist_info = f"{name}-{version}.dist-info" - extra = {} - if entrypoints: - entrypoints_joined = "\n".join(entrypoints) - text = f"[console_scripts]\n{entrypoints_joined}" - file = f"{dist_info}/entry_points.txt" - extra[file] = text.encode("utf8") - if top_level: - top_level_joined = "\n".join(top_level) + "\n" - file = f"{dist_info}/top_level.txt" - extra[file] = top_level_joined.encode("utf8") - return write_wheel_file( - wheel_path, - { - **contents, - **extra, - f"{dist_info}/METADATA": make_message( - { - "Metadata-Version": "2.1", - "Name": name, - "Version": version, - **metadata, - }, - description, - ), - f"{dist_info}/WHEEL": make_message( - { - "Wheel-Version": "1.0", - "Generator": "anki write_wheel.py", - "Root-Is-Purelib": "false", - "Tag": tag, - } - ), - }, - ) - - -def merge_sources(contents, root, exclude): - root = Path(root) - for path in root.glob("**/*"): - if path.is_dir() or exclude(path): - continue - path_str = str(path.relative_to(root.parent)) - if path_str.endswith(".pyc"): - continue - contents[path_str] = path.read_bytes() - - -def split_wheel_path(path: str): - path2 = Path(path) - components = path2.stem.split("-", maxsplit=2) - return components - - -class ExtraRequires: - def __init__(self, name, deps): - self.name = 
name - self.deps = deps - - -src_root = sys.argv[1] -generated_root = sys.argv[2] -wheel_path = sys.argv[3] - -name, version, tag = split_wheel_path(wheel_path) - - -def exclude_aqt(path: Path) -> bool: - if path.suffix in [".ui", ".scss", ".map", ".ts"]: - return True - if path.name.startswith("tsconfig"): - return True - if "/aqt/data" in str(path): - return True - return False - - -def exclude_nothing(path: Path) -> bool: - return False - - -def extract_requirements(path: Path) -> list[str]: - return path.read_text().splitlines() - - -if name == "aqt": - exclude = exclude_aqt -else: - exclude = exclude_nothing - -contents: dict[str, str] = {} -merge_sources(contents, src_root, exclude) -merge_sources(contents, generated_root, exclude) -all_requires: Sequence[str | ExtraRequires] - -if name == "anki": - all_requires = extract_requirements(Path("python/requirements.anki.in")) - entrypoints = None - top_level = None -else: - all_requires = extract_requirements(Path("python/requirements.aqt.in")) + [ - "anki==" + version, - "pyqt6>=6.2", - "pyqt6-webengine>=6.2", - ] - entrypoints = ["anki = aqt:run"] - top_level = ["aqt", "_aqt"] - -# reproducible builds -os.environ["SOURCE_DATE_EPOCH"] = "0" - -write_wheel( - wheel_path, - name=name, - version=version, - tag=tag, - metadata={ - "License": "AGPL-3", - "Classifier": [ - "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", - ], - "Requires-Python": ">=3.9", - "_dependencies": all_requires, - }, - description="Please see https://apps.ankiweb.net\n\n", - contents=contents, - entrypoints=entrypoints, - top_level=top_level, -) diff --git a/qt/.isort.cfg b/qt/.isort.cfg deleted file mode 100644 index aa01f87c7..000000000 --- a/qt/.isort.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[settings] -py_version=39 -profile=black -known_first_party=anki,aqt -extend_skip=aqt/forms,hooks_gen.py diff --git a/qt/aqt/__init__.py b/qt/aqt/__init__.py index cdbd05ebe..53bdc3c92 100644 --- a/qt/aqt/__init__.py +++ b/qt/aqt/__init__.py @@ -3,12 +3,19 @@ from __future__ import annotations +# ruff: noqa: F401 import atexit import logging +import os import sys from collections.abc import Callable from typing import TYPE_CHECKING, Any, Union, cast +if "ANKI_FIRST_RUN" in os.environ: + from .package import first_run_setup + + first_run_setup() + try: import pip_system_certs.wrapt_requests except ModuleNotFoundError: @@ -22,7 +29,7 @@ if sys.version_info[0] < 3 or sys.version_info[1] < 9: # ensure unicode filenames are supported try: "テスト".encode(sys.getfilesystemencoding()) -except UnicodeEncodeError as exc: +except UnicodeEncodeError: print("Anki requires a UTF-8 locale.") print("Please Google 'how to change locale on [your Linux distro]'") sys.exit(1) @@ -32,24 +39,19 @@ if "--syncserver" in sys.argv: from anki.syncserver import run_sync_server from anki.utils import is_mac - from .package import _fix_protobuf_path - - if is_mac and getattr(sys, "frozen", False): - _fix_protobuf_path() - # does not return run_sync_server() -from .package import packaged_build_setup +if sys.platform == "win32": + from win32com.shell import shell -packaged_build_setup() + shell.SetCurrentProcessExplicitAppUserModelID("Ankitects.Anki") import argparse import builtins import cProfile import getpass import locale -import os import tempfile import traceback from pathlib import Path @@ -270,13 +272,7 @@ def setupLangAndBackend( # load qt translations _qtrans = QTranslator() - if is_mac and getattr(sys, "frozen", False): - qt_dir = os.path.join(sys.prefix, 
"../Resources/qt_translations") - else: - if qtmajor == 5: - qt_dir = QLibraryInfo.location(QLibraryInfo.TranslationsPath) # type: ignore - else: - qt_dir = QLibraryInfo.path(QLibraryInfo.LibraryPath.TranslationsPath) + qt_dir = QLibraryInfo.path(QLibraryInfo.LibraryPath.TranslationsPath) qt_lang = lang.replace("-", "_") if _qtrans.load(f"qtbase_{qt_lang}", qt_dir): app.installTranslator(_qtrans) @@ -294,8 +290,7 @@ def setupLangAndBackend( class NativeEventFilter(QAbstractNativeEventFilter): def nativeEventFilter( self, eventType: Any, message: Any - ) -> tuple[bool, sip.voidptr | None]: - + ) -> tuple[bool, Any | None]: if eventType == "windows_generic_MSG": import ctypes.wintypes @@ -386,6 +381,8 @@ class AnkiApp(QApplication): def onRecv(self) -> None: sock = self._srv.nextPendingConnection() + if sock is None: + return if not sock.waitForReadyRead(self.TMOUT): sys.stderr.write(sock.errorString()) return @@ -416,14 +413,12 @@ class AnkiApp(QApplication): QRadioButton, QMenu, QSlider, - # classes with PyQt5 compatibility proxy - without_qt5_compat_wrapper(QToolButton), - without_qt5_compat_wrapper(QTabBar), + QToolButton, + QTabBar, ) if evt.type() in [QEvent.Type.Enter, QEvent.Type.HoverEnter]: if (isinstance(src, pointer_classes) and src.isEnabled()) or ( - isinstance(src, without_qt5_compat_wrapper(QComboBox)) - and not src.isEditable() + isinstance(src, QComboBox) and not src.isEditable() ): self.setOverrideCursor(QCursor(Qt.CursorShape.PointingHandCursor)) else: @@ -535,15 +530,12 @@ def setupGL(pm: aqt.profiles.ProfileManager) -> None: QQuickWindow.setGraphicsApi(QSGRendererInterface.GraphicsApi.OpenGL) elif driver in (VideoDriver.Software, VideoDriver.ANGLE): if is_win: - # on Windows, this appears to be sufficient on Qt5/Qt6. + # on Windows, this appears to be sufficient # On Qt6, ANGLE is excluded by the enum. os.environ["QT_OPENGL"] = driver.value elif is_mac: QCoreApplication.setAttribute(Qt.ApplicationAttribute.AA_UseSoftwareOpenGL) elif is_lin: - # Qt5 only - os.environ["QT_XCB_FORCE_SOFTWARE_OPENGL"] = "1" - # Required on Qt6 if "QTWEBENGINE_CHROMIUM_FLAGS" not in os.environ: os.environ["QTWEBENGINE_CHROMIUM_FLAGS"] = "--disable-gpu" if qtmajor > 5: @@ -571,7 +563,7 @@ def run() -> None: print(f"Starting Anki {_version}...") try: _run() - except Exception as e: + except Exception: traceback.print_exc() QMessageBox.critical( None, @@ -607,14 +599,13 @@ def _run(argv: list[str] | None = None, exec: bool = True) -> AnkiApp | None: profiler = cProfile.Profile() profiler.enable() - packaged = getattr(sys, "frozen", False) x11_available = os.getenv("DISPLAY") wayland_configured = qtmajor > 5 and ( os.getenv("QT_QPA_PLATFORM") == "wayland" or os.getenv("WAYLAND_DISPLAY") ) wayland_forced = os.getenv("ANKI_WAYLAND") - if (packaged or is_gnome) and wayland_configured: + if is_gnome and wayland_configured: if wayland_forced or not x11_available: # Work around broken fractional scaling in Wayland # https://bugreports.qt.io/browse/QTBUG-113574 @@ -674,12 +665,6 @@ def _run(argv: list[str] | None = None, exec: bool = True) -> AnkiApp | None: if is_win and "QT_QPA_PLATFORM" not in os.environ: os.environ["QT_QPA_PLATFORM"] = "windows:altgr" - # Disable sandbox on Qt5 PyPi/packaged builds, as it causes blank screens on modern - # glibc versions. We check for specific patch versions, because distros may have - # fixed the issue in their own Qt builds. 
- if is_lin and qtfullversion in ([5, 15, 2], [5, 14, 1]): - os.environ["QTWEBENGINE_DISABLE_SANDBOX"] = "1" - # create the app QCoreApplication.setApplicationName("Anki") QGuiApplication.setDesktopFileName("anki") diff --git a/qt/aqt/_macos_helper.py b/qt/aqt/_macos_helper.py index 859cb4b0a..27b368e80 100644 --- a/qt/aqt/_macos_helper.py +++ b/qt/aqt/_macos_helper.py @@ -3,55 +3,9 @@ from __future__ import annotations -import os import sys -from collections.abc import Callable -from ctypes import CDLL, CFUNCTYPE, c_bool, c_char_p -import aqt -import aqt.utils - - -class _MacOSHelper: - def __init__(self) -> None: - if getattr(sys, "frozen", False): - path = os.path.join(sys.prefix, "libankihelper.dylib") - else: - path = os.path.join( - aqt.utils.aqt_data_folder(), "lib", "libankihelper.dylib" - ) - - self._dll = CDLL(path) - self._dll.system_is_dark.restype = c_bool - - def system_is_dark(self) -> bool: - return self._dll.system_is_dark() - - def set_darkmode_enabled(self, enabled: bool) -> bool: - return self._dll.set_darkmode_enabled(enabled) - - def start_wav_record(self, path: str, on_error: Callable[[str], None]) -> None: - global _on_audio_error - _on_audio_error = on_error - self._dll.start_wav_record(path.encode("utf8"), _audio_error_callback) - - def end_wav_record(self) -> None: - "On completion, file should be saved if no error has arrived." - self._dll.end_wav_record() - - -# this must not be overwritten or deallocated -@CFUNCTYPE(None, c_char_p) # type: ignore -def _audio_error_callback(msg: str) -> None: - if handler := _on_audio_error: - handler(msg) - - -_on_audio_error: Callable[[str], None] | None = None - -macos_helper: _MacOSHelper | None = None if sys.platform == "darwin": - try: - macos_helper = _MacOSHelper() - except Exception as e: - print("macos_helper:", e) + from anki_mac_helper import macos_helper +else: + macos_helper = None diff --git a/qt/aqt/about.py b/qt/aqt/about.py index fb90a9355..228d3cfeb 100644 --- a/qt/aqt/about.py +++ b/qt/aqt/about.py @@ -66,7 +66,8 @@ def show(mw: aqt.AnkiQt) -> QDialog: # WebView contents ###################################################################### abouttext = "
" - abouttext += f"
<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}" + lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®") + abouttext += f"<p>{lede}" abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}" abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>" abouttext += ("Python %s Qt %s PyQt %s<br>
") % ( @@ -223,6 +224,7 @@ def show(mw: aqt.AnkiQt) -> QDialog: "Mukunda Madhav Dey", "Adnane Taghi", "Anon_0000", + "Bilolbek Normuminov", ) ) diff --git a/qt/aqt/addons.py b/qt/aqt/addons.py index fdce9142a..a940fb208 100644 --- a/qt/aqt/addons.py +++ b/qt/aqt/addons.py @@ -927,7 +927,6 @@ class AddonsDialog(QDialog): or self.mgr.configAction(addon.dir_name) ) ) - return def _onAddonItemSelected(self, row_int: int) -> None: try: @@ -1457,7 +1456,9 @@ class ChooseAddonsToUpdateDialog(QDialog): layout.addWidget(addons_list_widget) self.addons_list_widget = addons_list_widget - button_box = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel) # type: ignore + button_box = QDialogButtonBox( + QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel + ) # type: ignore qconnect( button_box.button(QDialogButtonBox.StandardButton.Ok).clicked, self.accept ) diff --git a/qt/aqt/ankihub.py b/qt/aqt/ankihub.py index 4d3b00c8a..0ea9b6dac 100644 --- a/qt/aqt/ankihub.py +++ b/qt/aqt/ankihub.py @@ -36,7 +36,6 @@ def ankihub_login( username: str = "", password: str = "", ) -> None: - def on_future_done(fut: Future[str], username: str, password: str) -> None: try: token = fut.result() @@ -73,7 +72,6 @@ def ankihub_logout( on_success: Callable[[], None], token: str, ) -> None: - def logout() -> None: mw.pm.set_ankihub_username(None) mw.pm.set_ankihub_token(None) diff --git a/qt/aqt/browser/__init__.py b/qt/aqt/browser/__init__.py index ffff667e7..130167124 100644 --- a/qt/aqt/browser/__init__.py +++ b/qt/aqt/browser/__init__.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html from __future__ import annotations +# ruff: noqa: F401 import sys import aqt diff --git a/qt/aqt/browser/sidebar/__init__.py b/qt/aqt/browser/sidebar/__init__.py index 99ca8f7c4..555ed3cdd 100644 --- a/qt/aqt/browser/sidebar/__init__.py +++ b/qt/aqt/browser/sidebar/__init__.py @@ -1,5 +1,6 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +# ruff: noqa: F401 from anki.utils import is_mac from aqt.theme import theme_manager diff --git a/qt/aqt/browser/sidebar/model.py b/qt/aqt/browser/sidebar/model.py index 286811aca..fd27926b5 100644 --- a/qt/aqt/browser/sidebar/model.py +++ b/qt/aqt/browser/sidebar/model.py @@ -2,8 +2,6 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html from __future__ import annotations -from typing import cast - import aqt import aqt.browser from aqt.browser.sidebar.item import SidebarItem @@ -107,11 +105,11 @@ class SidebarModel(QAbstractItemModel): return self.sidebar._on_rename(index.internalPointer(), text) def supportedDropActions(self) -> Qt.DropAction: - return cast(Qt.DropAction, Qt.DropAction.MoveAction) + return Qt.DropAction.MoveAction def flags(self, index: QModelIndex) -> Qt.ItemFlag: if not index.isValid(): - return cast(Qt.ItemFlag, Qt.ItemFlag.ItemIsEnabled) + return Qt.ItemFlag.ItemIsEnabled flags = ( Qt.ItemFlag.ItemIsEnabled | Qt.ItemFlag.ItemIsSelectable diff --git a/qt/aqt/browser/sidebar/tree.py b/qt/aqt/browser/sidebar/tree.py index e28f166e9..22d7fa4a6 100644 --- a/qt/aqt/browser/sidebar/tree.py +++ b/qt/aqt/browser/sidebar/tree.py @@ -106,7 +106,7 @@ class SidebarTreeView(QTreeView): def _setup_style(self) -> None: # match window background color and tweak style bgcolor = QPalette().window().color().name() - border = theme_manager.var(colors.BORDER) + 
theme_manager.var(colors.BORDER) styles = [ "padding: 3px", "padding-right: 0px", @@ -711,7 +711,6 @@ class SidebarTreeView(QTreeView): def _flags_tree(self, root: SidebarItem) -> None: icon_off = "icons:flag-variant-off-outline.svg" - icon = "icons:flag-variant.svg" icon_outline = "icons:flag-variant-outline.svg" root = self._section_root( diff --git a/qt/aqt/browser/table/__init__.py b/qt/aqt/browser/table/__init__.py index bd666cf1a..c942dc30f 100644 --- a/qt/aqt/browser/table/__init__.py +++ b/qt/aqt/browser/table/__init__.py @@ -2,6 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html from __future__ import annotations +# ruff: noqa: F401 import copy import time from collections.abc import Generator, Sequence diff --git a/qt/aqt/browser/table/model.py b/qt/aqt/browser/table/model.py index 5b42c0ca3..732e8c99c 100644 --- a/qt/aqt/browser/table/model.py +++ b/qt/aqt/browser/table/model.py @@ -105,11 +105,11 @@ class DataModel(QAbstractTableModel): row = CellRow(*self.col.browser_row_for_id(item)) except BackendError as e: return CellRow.disabled(self.len_columns(), str(e)) - except Exception as e: + except Exception: return CellRow.disabled( self.len_columns(), tr.errors_please_check_database() ) - except BaseException as e: + except BaseException: # fatal error like a panic in the backend - dump it to the # console so it gets picked up by the error handler import traceback @@ -325,15 +325,13 @@ class DataModel(QAbstractTableModel): return 0 return self.len_columns() - _QFont = without_qt5_compat_wrapper(QFont) - def data(self, index: QModelIndex = QModelIndex(), role: int = 0) -> Any: if not index.isValid(): return QVariant() if role == Qt.ItemDataRole.FontRole: if not self.column_at(index).uses_cell_font: return QVariant() - qfont = self._QFont() + qfont = QFont() row = self.get_row(index) qfont.setFamily(row.font_name) qfont.setPixelSize(row.font_size) diff --git a/qt/aqt/browser/table/state.py b/qt/aqt/browser/table/state.py index 8054d2597..4faf88611 100644 --- a/qt/aqt/browser/table/state.py +++ b/qt/aqt/browser/table/state.py @@ -59,7 +59,7 @@ class ItemState(ABC): # abstractproperty is deprecated but used due to mypy limitations # (https://github.com/python/mypy/issues/1362) - @abstractproperty # pylint: disable=deprecated-decorator + @abstractproperty def active_columns(self) -> list[str]: """Return the saved or default columns for the state.""" diff --git a/qt/aqt/browser/table/table.py b/qt/aqt/browser/table/table.py index f3d543d93..e28075b3f 100644 --- a/qt/aqt/browser/table/table.py +++ b/qt/aqt/browser/table/table.py @@ -361,8 +361,7 @@ class Table: for m in self.col.models.all(): for t in m["tmpls"]: bsize = t.get("bsize", 0) - if bsize > curmax: - curmax = bsize + curmax = max(curmax, bsize) assert self._view is not None vh = self._view.verticalHeader() @@ -382,10 +381,7 @@ class Table: hh.setContextMenuPolicy(Qt.ContextMenuPolicy.CustomContextMenu) self._restore_header() qconnect(hh.customContextMenuRequested, self._on_header_context) - if qtmajor == 5: - qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed_qt5) - else: - qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed) + qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed) qconnect(hh.sectionMoved, self._on_column_moved) # Slots @@ -495,12 +491,6 @@ class Table: if checked: self._scroll_to_column(self._model.len_columns() - 1) - def _on_sort_column_changed_qt5(self, section: int, order: int) -> None: - self._on_sort_column_changed( - section, - 
Qt.SortOrder.AscendingOrder if not order else Qt.SortOrder.DescendingOrder, - ) - def _on_sort_column_changed(self, section: int, order: Qt.SortOrder) -> None: column = self._model.column_at_section(section) sorting = column.sorting_notes if self.is_notes_mode() else column.sorting_cards diff --git a/qt/aqt/clayout.py b/qt/aqt/clayout.py index 388ae46c0..aec5326f4 100644 --- a/qt/aqt/clayout.py +++ b/qt/aqt/clayout.py @@ -221,7 +221,7 @@ class CardLayout(QDialog): ) for i in range(min(len(self.cloze_numbers), 9)): QShortcut( # type: ignore - QKeySequence(f"Alt+{i+1}"), + QKeySequence(f"Alt+{i + 1}"), self, activated=lambda n=i: self.pform.cloze_number_combo.setCurrentIndex(n), ) @@ -790,7 +790,7 @@ class CardLayout(QDialog): assert a is not None qconnect( a.triggered, - lambda: self.on_restore_to_default(), # pylint: disable=unnecessary-lambda + lambda: self.on_restore_to_default(), ) if not self._isCloze(): diff --git a/qt/aqt/debug_console.py b/qt/aqt/debug_console.py index a37d14010..54fa8a17a 100644 --- a/qt/aqt/debug_console.py +++ b/qt/aqt/debug_console.py @@ -294,7 +294,6 @@ class DebugConsole(QDialog): } self._captureOutput(True) try: - # pylint: disable=exec-used exec(text, vars) except Exception: self._output += traceback.format_exc() diff --git a/qt/aqt/deckbrowser.py b/qt/aqt/deckbrowser.py index 77bd84220..5dc688155 100644 --- a/qt/aqt/deckbrowser.py +++ b/qt/aqt/deckbrowser.py @@ -386,9 +386,7 @@ class DeckBrowser: if b[0]: b[0] = tr.actions_shortcut_key(val=shortcut(b[0])) buf += """ -""" % tuple( - b - ) +""" % tuple(b) self.bottom.draw( buf=buf, link_handler=self._linkHandler, diff --git a/qt/aqt/editor.py b/qt/aqt/editor.py index c1eb14b18..f2f267097 100644 --- a/qt/aqt/editor.py +++ b/qt/aqt/editor.py @@ -36,7 +36,7 @@ from anki.hooks import runFilter from anki.httpclient import HttpClient from anki.models import NotetypeDict, NotetypeId, StockNotetype from anki.notes import Note, NoteFieldsCheckResult, NoteId -from anki.utils import checksum, is_lin, is_mac, is_win, namedtmp +from anki.utils import checksum, is_lin, is_win, namedtmp from aqt import AnkiQt, colors, gui_hooks from aqt.operations import QueryOp from aqt.operations.note import update_note @@ -343,7 +343,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too gui_hooks.editor_did_init_shortcuts(cuts, self) for row in cuts: if len(row) == 2: - keys, fn = row # pylint: disable=unbalanced-tuple-unpacking + keys, fn = row fn = self._addFocusCheck(fn) else: keys, fn, _ = row @@ -796,7 +796,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too def accept(file: str) -> None: self.resolve_media(file) - file = getFile( + getFile( parent=self.widget, title=tr.editing_add_media(), cb=cast(Callable[[Any], None], accept), @@ -999,21 +999,24 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too if html.find(">") < 0: return html - with warnings.catch_warnings() as w: + with warnings.catch_warnings(): warnings.simplefilter("ignore", UserWarning) doc = BeautifulSoup(html, "html.parser") - tag: bs4.element.Tag if not internal: - for tag in self.removeTags: - for node in doc(tag): + for tag_name in self.removeTags: + for node in doc(tag_name): node.decompose() # convert p tags to divs for node in doc("p"): - node.name = "div" + if hasattr(node, "name"): + node.name = "div" - for tag in doc("img"): + for element in doc("img"): + if not isinstance(element, bs4.Tag): + continue + tag = element try: src = tag["src"] except KeyError: @@ 
-1023,18 +1026,17 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too # in internal pastes, rewrite mediasrv references to relative if internal: - m = re.match(r"http://127.0.0.1:\d+/(.*)$", src) + m = re.match(r"http://127.0.0.1:\d+/(.*)$", str(src)) if m: tag["src"] = m.group(1) - else: - # in external pastes, download remote media - if self.isURL(src): - fname = self._retrieveURL(src) - if fname: - tag["src"] = fname - elif src.startswith("data:image/"): - # and convert inlined data - tag["src"] = self.inlinedImageToFilename(src) + # in external pastes, download remote media + elif isinstance(src, str) and self.isURL(src): + fname = self._retrieveURL(src) + if fname: + tag["src"] = fname + elif isinstance(src, str) and src.startswith("data:image/"): + # and convert inlined data + tag["src"] = self.inlinedImageToFilename(str(src)) html = str(doc) return html @@ -1099,7 +1101,7 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too ) filter = f"{tr.editing_media()} ({extension_filter})" - file = getFile( + getFile( parent=self.widget, title=tr.editing_add_media(), cb=cast(Callable[[Any], None], self.setup_mask_editor), @@ -1732,10 +1734,9 @@ class EditorWebView(AnkiWebView): assert a is not None qconnect(a.triggered, lambda: openFolder(path)) - if is_win or is_mac: - a = menu.addAction(tr.editing_show_in_folder()) - assert a is not None - qconnect(a.triggered, lambda: show_in_folder(path)) + a = menu.addAction(tr.editing_show_in_folder()) + assert a is not None + qconnect(a.triggered, lambda: show_in_folder(path)) def _clipboard(self) -> QClipboard: clipboard = self.editor.mw.app.clipboard() diff --git a/qt/aqt/errors.py b/qt/aqt/errors.py index c2b2f2ae6..a6d9251e2 100644 --- a/qt/aqt/errors.py +++ b/qt/aqt/errors.py @@ -23,25 +23,36 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr if TYPE_CHECKING: from aqt.main import AnkiQt +# so we can be non-modal/non-blocking, without Python deallocating the message +# box ahead of time +_mbox: QMessageBox | None = None + def show_exception(*, parent: QWidget, exception: Exception) -> None: "Present a caught exception to the user using a pop-up." 
if isinstance(exception, Interrupted): # nothing to do return + global _mbox + error_lines = [] + help_page = HelpPage.TROUBLESHOOTING if isinstance(exception, BackendError): if exception.context: - print(exception.context) + error_lines.append(exception.context) if exception.backtrace: - print(exception.backtrace) - showWarning(str(exception), parent=parent, help=exception.help_page) + error_lines.append(exception.backtrace) + if exception.help_page is not None: + help_page = exception.help_page else: # if the error is not originating from the backend, dump # a traceback to the console to aid in debugging - traceback.print_exception( - None, exception, exception.__traceback__, file=sys.stdout + error_lines = traceback.format_exception( + None, exception, exception.__traceback__ ) - showWarning(str(exception), parent=parent) + error_text = "\n".join(error_lines) + print(error_lines) + _mbox = _init_message_box(str(exception), error_text, help_page) + _mbox.show() def is_chromium_cert_error(error: str) -> bool: @@ -158,13 +169,44 @@ if not os.environ.get("DEBUG"): sys.excepthook = excepthook -# so we can be non-modal/non-blocking, without Python deallocating the message -# box ahead of time -_mbox: QMessageBox | None = None + +def _init_message_box( + user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING +): + global _mbox + + _mbox = QMessageBox() + _mbox.setWindowTitle("Anki") + _mbox.setText(user_text) + _mbox.setIcon(QMessageBox.Icon.Warning) + _mbox.setTextFormat(Qt.TextFormat.PlainText) + + def show_help(): + openHelp(help_page) + + def copy_debug_info(): + QApplication.clipboard().setText(debug_text) + tooltip(tr.errors_copied_to_clipboard(), parent=_mbox) + + help = _mbox.addButton(QMessageBox.StandardButton.Help) + if debug_text: + debug_info = _mbox.addButton( + tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole + ) + debug_info.disconnect() + debug_info.clicked.connect(copy_debug_info) + cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel) + cancel.setText(tr.actions_close()) + + help.disconnect() + help.clicked.connect(show_help) + + return _mbox class ErrorHandler(QObject): "Catch stderr and write into buffer." 
+ ivl = 100 fatal_error_encountered = False @@ -251,33 +293,7 @@ class ErrorHandler(QObject): user_text += "\n\n" + self._addonText(error) debug_text += addon_debug_info() - def show_troubleshooting(): - openHelp(HelpPage.TROUBLESHOOTING) - - def copy_debug_info(): - QApplication.clipboard().setText(debug_text) - tooltip(tr.errors_copied_to_clipboard(), parent=_mbox) - - global _mbox - _mbox = QMessageBox() - _mbox.setWindowTitle("Anki") - _mbox.setText(user_text) - _mbox.setIcon(QMessageBox.Icon.Warning) - _mbox.setTextFormat(Qt.TextFormat.PlainText) - - troubleshooting = _mbox.addButton( - tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole - ) - debug_info = _mbox.addButton( - tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole - ) - cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel) - cancel.setText(tr.actions_close()) - - troubleshooting.disconnect() - troubleshooting.clicked.connect(show_troubleshooting) - debug_info.disconnect() - debug_info.clicked.connect(copy_debug_info) + _mbox = _init_message_box(user_text, debug_text) if self.fatal_error_encountered: _mbox.exec() diff --git a/qt/aqt/exporting.py b/qt/aqt/exporting.py index 4ff024917..cadbaef0c 100644 --- a/qt/aqt/exporting.py +++ b/qt/aqt/exporting.py @@ -212,11 +212,10 @@ class ExportDialog(QDialog): if self.isVerbatim: msg = tr.exporting_collection_exported() self.mw.reopen() + elif self.isTextNote: + msg = tr.exporting_note_exported(count=self.exporter.count) else: - if self.isTextNote: - msg = tr.exporting_note_exported(count=self.exporter.count) - else: - msg = tr.exporting_card_exported(count=self.exporter.count) + msg = tr.exporting_card_exported(count=self.exporter.count) gui_hooks.legacy_exporter_did_export(self.exporter) tooltip(msg, period=3000) QDialog.reject(self) diff --git a/qt/aqt/forms/__init__.py b/qt/aqt/forms/__init__.py index 9484f91ec..7cbfe3a6f 100644 --- a/qt/aqt/forms/__init__.py +++ b/qt/aqt/forms/__init__.py @@ -1,3 +1,4 @@ +# ruff: noqa: F401 from . 
import ( about, addcards, diff --git a/qt/aqt/forms/about.py b/qt/aqt/forms/about.py index 4faf97fb0..fe66f7da3 100644 --- a/qt/aqt/forms/about.py +++ b/qt/aqt/forms/about.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.about_qt6 import * -else: - from _aqt.forms.about_qt5 import * # type: ignore +from _aqt.forms.about_qt6 import * diff --git a/qt/aqt/forms/addcards.py b/qt/aqt/forms/addcards.py index ae2debe3e..8c501695e 100644 --- a/qt/aqt/forms/addcards.py +++ b/qt/aqt/forms/addcards.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.addcards_qt6 import * -else: - from _aqt.forms.addcards_qt5 import * # type: ignore +from _aqt.forms.addcards_qt6 import * diff --git a/qt/aqt/forms/addfield.py b/qt/aqt/forms/addfield.py index 57c697b4a..a2f9eed74 100644 --- a/qt/aqt/forms/addfield.py +++ b/qt/aqt/forms/addfield.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.addfield_qt6 import * -else: - from _aqt.forms.addfield_qt5 import * # type: ignore +from _aqt.forms.addfield_qt6 import * diff --git a/qt/aqt/forms/addmodel.py b/qt/aqt/forms/addmodel.py index 9a7d06b7e..0af313a45 100644 --- a/qt/aqt/forms/addmodel.py +++ b/qt/aqt/forms/addmodel.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.addmodel_qt6 import * -else: - from _aqt.forms.addmodel_qt5 import * # type: ignore +from _aqt.forms.addmodel_qt6 import * diff --git a/qt/aqt/forms/addonconf.py b/qt/aqt/forms/addonconf.py index cca92b7b9..d78ebb82a 100644 --- a/qt/aqt/forms/addonconf.py +++ b/qt/aqt/forms/addonconf.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.addonconf_qt6 import * -else: - from _aqt.forms.addonconf_qt5 import * # type: ignore +from _aqt.forms.addonconf_qt6 import * diff --git a/qt/aqt/forms/addons.py b/qt/aqt/forms/addons.py index fa00be08b..46d7532b4 100644 --- a/qt/aqt/forms/addons.py +++ b/qt/aqt/forms/addons.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.addons_qt6 import * -else: - from _aqt.forms.addons_qt5 import * # type: ignore +from _aqt.forms.addons_qt6 import * diff --git a/qt/aqt/forms/browser.py b/qt/aqt/forms/browser.py index 403f780c5..70214ba4c 100644 --- a/qt/aqt/forms/browser.py +++ b/qt/aqt/forms/browser.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.browser_qt6 import * -else: - from _aqt.forms.browser_qt5 import * # type: ignore +from _aqt.forms.browser_qt6 import * diff --git a/qt/aqt/forms/browserdisp.py b/qt/aqt/forms/browserdisp.py index 712e5a400..fc745a703 100644 --- a/qt/aqt/forms/browserdisp.py +++ b/qt/aqt/forms/browserdisp.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.browserdisp_qt6 import * -else: - from _aqt.forms.browserdisp_qt5 import * # type: ignore +from _aqt.forms.browserdisp_qt6 import * diff --git a/qt/aqt/forms/browseropts.py b/qt/aqt/forms/browseropts.py index 68602c85c..1ae696033 100644 --- a/qt/aqt/forms/browseropts.py +++ b/qt/aqt/forms/browseropts.py @@ -1,8 +1 @@ -from typing import 
TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.browseropts_qt6 import * -else: - from _aqt.forms.browseropts_qt5 import * # type: ignore +from _aqt.forms.browseropts_qt6 import * diff --git a/qt/aqt/forms/changemap.py b/qt/aqt/forms/changemap.py index 6028b0d49..b48b49a83 100644 --- a/qt/aqt/forms/changemap.py +++ b/qt/aqt/forms/changemap.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.changemap_qt6 import * -else: - from _aqt.forms.changemap_qt5 import * # type: ignore +from _aqt.forms.changemap_qt6 import * diff --git a/qt/aqt/forms/changemodel.py b/qt/aqt/forms/changemodel.py index 73f7f6095..cd1931af8 100644 --- a/qt/aqt/forms/changemodel.py +++ b/qt/aqt/forms/changemodel.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.changemodel_qt6 import * -else: - from _aqt.forms.changemodel_qt5 import * # type: ignore +from _aqt.forms.changemodel_qt6 import * diff --git a/qt/aqt/forms/clayout_top.py b/qt/aqt/forms/clayout_top.py index 24f78be11..1a76c882a 100644 --- a/qt/aqt/forms/clayout_top.py +++ b/qt/aqt/forms/clayout_top.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.clayout_top_qt6 import * -else: - from _aqt.forms.clayout_top_qt5 import * # type: ignore +from _aqt.forms.clayout_top_qt6 import * diff --git a/qt/aqt/forms/customstudy.py b/qt/aqt/forms/customstudy.py index 393638b2c..3bfad32ac 100644 --- a/qt/aqt/forms/customstudy.py +++ b/qt/aqt/forms/customstudy.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.customstudy_qt6 import * -else: - from _aqt.forms.customstudy_qt5 import * # type: ignore +from _aqt.forms.customstudy_qt6 import * diff --git a/qt/aqt/forms/dconf.py b/qt/aqt/forms/dconf.py index e28db5c31..f39de7077 100644 --- a/qt/aqt/forms/dconf.py +++ b/qt/aqt/forms/dconf.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.dconf_qt6 import * -else: - from _aqt.forms.dconf_qt5 import * # type: ignore +from _aqt.forms.dconf_qt6 import * diff --git a/qt/aqt/forms/debug.py b/qt/aqt/forms/debug.py index 928ba7795..0880c49fc 100644 --- a/qt/aqt/forms/debug.py +++ b/qt/aqt/forms/debug.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.debug_qt6 import * -else: - from _aqt.forms.debug_qt5 import * # type: ignore +from _aqt.forms.debug_qt6 import * diff --git a/qt/aqt/forms/editcurrent.py b/qt/aqt/forms/editcurrent.py index 1281faafe..cfa9ab1d9 100644 --- a/qt/aqt/forms/editcurrent.py +++ b/qt/aqt/forms/editcurrent.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.editcurrent_qt6 import * -else: - from _aqt.forms.editcurrent_qt5 import * # type: ignore +from _aqt.forms.editcurrent_qt6 import * diff --git a/qt/aqt/forms/edithtml.py b/qt/aqt/forms/edithtml.py index 029977705..61b9e0fd2 100644 --- a/qt/aqt/forms/edithtml.py +++ b/qt/aqt/forms/edithtml.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.edithtml_qt6 import * -else: - from _aqt.forms.edithtml_qt5 import * # 
type: ignore +from _aqt.forms.edithtml_qt6 import * diff --git a/qt/aqt/forms/emptycards.py b/qt/aqt/forms/emptycards.py index 046c7eb3a..1cae290fd 100644 --- a/qt/aqt/forms/emptycards.py +++ b/qt/aqt/forms/emptycards.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.emptycards_qt6 import * -else: - from _aqt.forms.emptycards_qt5 import * # type: ignore +from _aqt.forms.emptycards_qt6 import * diff --git a/qt/aqt/forms/exporting.py b/qt/aqt/forms/exporting.py index 559e50ecd..d09e9cdd9 100644 --- a/qt/aqt/forms/exporting.py +++ b/qt/aqt/forms/exporting.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.exporting_qt6 import * -else: - from _aqt.forms.exporting_qt5 import * # type: ignore +from _aqt.forms.exporting_qt6 import * diff --git a/qt/aqt/forms/fields.py b/qt/aqt/forms/fields.py index fa379be67..cf7a39f75 100644 --- a/qt/aqt/forms/fields.py +++ b/qt/aqt/forms/fields.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.fields_qt6 import * -else: - from _aqt.forms.fields_qt5 import * # type: ignore +from _aqt.forms.fields_qt6 import * diff --git a/qt/aqt/forms/filtered_deck.py b/qt/aqt/forms/filtered_deck.py index 9b9589046..59870f5a0 100644 --- a/qt/aqt/forms/filtered_deck.py +++ b/qt/aqt/forms/filtered_deck.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.filtered_deck_qt6 import * -else: - from _aqt.forms.filtered_deck_qt5 import * # type: ignore +from _aqt.forms.filtered_deck_qt6 import * diff --git a/qt/aqt/forms/finddupes.py b/qt/aqt/forms/finddupes.py index 7bca9c4cd..43ac30549 100644 --- a/qt/aqt/forms/finddupes.py +++ b/qt/aqt/forms/finddupes.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.finddupes_qt6 import * -else: - from _aqt.forms.finddupes_qt5 import * # type: ignore +from _aqt.forms.finddupes_qt6 import * diff --git a/qt/aqt/forms/findreplace.py b/qt/aqt/forms/findreplace.py index 8f82e58fe..65d1f3555 100644 --- a/qt/aqt/forms/findreplace.py +++ b/qt/aqt/forms/findreplace.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.findreplace_qt6 import * -else: - from _aqt.forms.findreplace_qt5 import * # type: ignore +from _aqt.forms.findreplace_qt6 import * diff --git a/qt/aqt/forms/forget.py b/qt/aqt/forms/forget.py index 97425aed8..0d17803df 100644 --- a/qt/aqt/forms/forget.py +++ b/qt/aqt/forms/forget.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.forget_qt6 import * -else: - from _aqt.forms.forget_qt5 import * # type: ignore +from _aqt.forms.forget_qt6 import * diff --git a/qt/aqt/forms/getaddons.py b/qt/aqt/forms/getaddons.py index c47ed27a8..ecb6c23dd 100644 --- a/qt/aqt/forms/getaddons.py +++ b/qt/aqt/forms/getaddons.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.getaddons_qt6 import * -else: - from _aqt.forms.getaddons_qt5 import * # type: ignore +from _aqt.forms.getaddons_qt6 import * diff --git a/qt/aqt/forms/importing.py b/qt/aqt/forms/importing.py index f60b74a4e..39ade97c2 100644 --- 
a/qt/aqt/forms/importing.py +++ b/qt/aqt/forms/importing.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.importing_qt6 import * -else: - from _aqt.forms.importing_qt5 import * # type: ignore +from _aqt.forms.importing_qt6 import * diff --git a/qt/aqt/forms/main.py b/qt/aqt/forms/main.py index 068804a2d..7ec7107b3 100644 --- a/qt/aqt/forms/main.py +++ b/qt/aqt/forms/main.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.main_qt6 import * -else: - from _aqt.forms.main_qt5 import * # type: ignore +from _aqt.forms.main_qt6 import * diff --git a/qt/aqt/forms/main.ui b/qt/aqt/forms/main.ui index 596ea985c..bffc67ad0 100644 --- a/qt/aqt/forms/main.ui +++ b/qt/aqt/forms/main.ui @@ -46,7 +46,7 @@ 0 0 667 - 24 + 43 @@ -93,6 +93,7 @@ + @@ -130,7 +131,7 @@ Ctrl+P - QAction::PreferencesRole + QAction::MenuRole::PreferencesRole @@ -138,7 +139,7 @@ qt_accel_about - QAction::AboutRole + QAction::MenuRole::ApplicationSpecificRole @@ -283,6 +284,11 @@ qt_accel_load_backup + + + qt_accel_upgrade_downgrade + + diff --git a/qt/aqt/forms/modelopts.py b/qt/aqt/forms/modelopts.py index 0e4770c92..811b1fb7b 100644 --- a/qt/aqt/forms/modelopts.py +++ b/qt/aqt/forms/modelopts.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.modelopts_qt6 import * -else: - from _aqt.forms.modelopts_qt5 import * # type: ignore +from _aqt.forms.modelopts_qt6 import * diff --git a/qt/aqt/forms/models.py b/qt/aqt/forms/models.py index fb0b64e0a..43c75c62a 100644 --- a/qt/aqt/forms/models.py +++ b/qt/aqt/forms/models.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.models_qt6 import * -else: - from _aqt.forms.models_qt5 import * # type: ignore +from _aqt.forms.models_qt6 import * diff --git a/qt/aqt/forms/preferences.py b/qt/aqt/forms/preferences.py index de9fdc989..6fdb0bfd3 100644 --- a/qt/aqt/forms/preferences.py +++ b/qt/aqt/forms/preferences.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.preferences_qt6 import * -else: - from _aqt.forms.preferences_qt5 import * # type: ignore +from _aqt.forms.preferences_qt6 import * diff --git a/qt/aqt/forms/preview.py b/qt/aqt/forms/preview.py index ca938a396..bf735bd39 100644 --- a/qt/aqt/forms/preview.py +++ b/qt/aqt/forms/preview.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.preview_qt6 import * -else: - from _aqt.forms.preview_qt5 import * # type: ignore +from _aqt.forms.preview_qt6 import * diff --git a/qt/aqt/forms/profiles.py b/qt/aqt/forms/profiles.py index c7bcc10e1..7d5b8d6e0 100644 --- a/qt/aqt/forms/profiles.py +++ b/qt/aqt/forms/profiles.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.profiles_qt6 import * -else: - from _aqt.forms.profiles_qt5 import * # type: ignore +from _aqt.forms.profiles_qt6 import * diff --git a/qt/aqt/forms/progress.py b/qt/aqt/forms/progress.py index 47a57ce49..7a2a332d5 100644 --- a/qt/aqt/forms/progress.py +++ b/qt/aqt/forms/progress.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from 
_aqt.forms.progress_qt6 import * -else: - from _aqt.forms.progress_qt5 import * # type: ignore +from _aqt.forms.progress_qt6 import * diff --git a/qt/aqt/forms/reposition.py b/qt/aqt/forms/reposition.py index 646abf7c4..cfad6b55a 100644 --- a/qt/aqt/forms/reposition.py +++ b/qt/aqt/forms/reposition.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.reposition_qt6 import * -else: - from _aqt.forms.reposition_qt5 import * # type: ignore +from _aqt.forms.reposition_qt6 import * diff --git a/qt/aqt/forms/setgroup.py b/qt/aqt/forms/setgroup.py index 649e4f75a..617ef3b96 100644 --- a/qt/aqt/forms/setgroup.py +++ b/qt/aqt/forms/setgroup.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.setgroup_qt6 import * -else: - from _aqt.forms.setgroup_qt5 import * # type: ignore +from _aqt.forms.setgroup_qt6 import * diff --git a/qt/aqt/forms/setlang.py b/qt/aqt/forms/setlang.py index bb715ff92..efe14343b 100644 --- a/qt/aqt/forms/setlang.py +++ b/qt/aqt/forms/setlang.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.setlang_qt6 import * -else: - from _aqt.forms.setlang_qt5 import * # type: ignore +from _aqt.forms.setlang_qt6 import * diff --git a/qt/aqt/forms/stats.py b/qt/aqt/forms/stats.py index 212c03345..12b161f4e 100644 --- a/qt/aqt/forms/stats.py +++ b/qt/aqt/forms/stats.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.stats_qt6 import * -else: - from _aqt.forms.stats_qt5 import * # type: ignore +from _aqt.forms.stats_qt6 import * diff --git a/qt/aqt/forms/studydeck.py b/qt/aqt/forms/studydeck.py index b95bc7e87..497ab01cf 100644 --- a/qt/aqt/forms/studydeck.py +++ b/qt/aqt/forms/studydeck.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.studydeck_qt6 import * -else: - from _aqt.forms.studydeck_qt5 import * # type: ignore +from _aqt.forms.studydeck_qt6 import * diff --git a/qt/aqt/forms/synclog.py b/qt/aqt/forms/synclog.py index 97fefe300..ddd08456b 100644 --- a/qt/aqt/forms/synclog.py +++ b/qt/aqt/forms/synclog.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.synclog_qt6 import * -else: - from _aqt.forms.synclog_qt5 import * # type: ignore +from _aqt.forms.synclog_qt6 import * diff --git a/qt/aqt/forms/taglimit.py b/qt/aqt/forms/taglimit.py index 7a4763016..88262c657 100644 --- a/qt/aqt/forms/taglimit.py +++ b/qt/aqt/forms/taglimit.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.taglimit_qt6 import * -else: - from _aqt.forms.taglimit_qt5 import * # type: ignore +from _aqt.forms.taglimit_qt6 import * diff --git a/qt/aqt/forms/template.py b/qt/aqt/forms/template.py index 84f3d2a05..7540d72e0 100644 --- a/qt/aqt/forms/template.py +++ b/qt/aqt/forms/template.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.template_qt6 import * -else: - from _aqt.forms.template_qt5 import * # type: ignore +from _aqt.forms.template_qt6 import * diff --git a/qt/aqt/forms/widgets.py b/qt/aqt/forms/widgets.py index b91f7ae26..07dc11c6c 100644 --- 
a/qt/aqt/forms/widgets.py +++ b/qt/aqt/forms/widgets.py @@ -1,8 +1 @@ -from typing import TYPE_CHECKING - -from aqt.qt import qtmajor - -if qtmajor > 5 or TYPE_CHECKING: - from _aqt.forms.widgets_qt6 import * -else: - from _aqt.forms.widgets_qt5 import * # type: ignore +from _aqt.forms.widgets_qt6 import * diff --git a/qt/aqt/import_export/importing.py b/qt/aqt/import_export/importing.py index 938824035..cb27c5e4b 100644 --- a/qt/aqt/import_export/importing.py +++ b/qt/aqt/import_export/importing.py @@ -134,9 +134,8 @@ IMPORTERS: list[type[Importer]] = [ def legacy_file_endings(col: Collection) -> list[str]: - from anki.importing import AnkiPackageImporter + from anki.importing import AnkiPackageImporter, TextImporter, importers from anki.importing import MnemosyneImporter as LegacyMnemosyneImporter - from anki.importing import TextImporter, importers return [ ext diff --git a/qt/aqt/importing.py b/qt/aqt/importing.py index 8f9741a77..8701f9843 100644 --- a/qt/aqt/importing.py +++ b/qt/aqt/importing.py @@ -11,10 +11,10 @@ from collections.abc import Callable from concurrent.futures import Future from typing import Any -import anki.importing as importing import aqt.deckchooser import aqt.forms import aqt.modelchooser +from anki import importing from anki.importing.anki2 import MediaMapInvalid, V2ImportIntoV1 from anki.importing.apkg import AnkiPackageImporter from aqt.import_export.importing import ColpkgImporter @@ -262,7 +262,7 @@ class ImportDialog(QDialog): self.mapwidget.setLayout(self.grid) self.grid.setContentsMargins(3, 3, 3, 3) self.grid.setSpacing(6) - for num in range(len(self.mapping)): # pylint: disable=consider-using-enumerate + for num in range(len(self.mapping)): text = tr.importing_field_of_file_is(val=num + 1) self.grid.addWidget(QLabel(text), num, 0) if self.mapping[num] == "_tags": @@ -357,7 +357,7 @@ def importFile(mw: AnkiQt, file: str) -> None: try: importer.open() mw.progress.finish() - diag = ImportDialog(mw, importer) + ImportDialog(mw, importer) except UnicodeDecodeError: mw.progress.finish() showUnicodeWarning() @@ -443,3 +443,4 @@ def setupApkgImport(mw: AnkiQt, importer: AnkiPackageImporter) -> bool: return True ColpkgImporter.do_import(mw, importer.file) return False + return False diff --git a/qt/aqt/main.py b/qt/aqt/main.py index bc28e287b..c707d1b2a 100644 --- a/qt/aqt/main.py +++ b/qt/aqt/main.py @@ -376,7 +376,6 @@ class AnkiQt(QMainWindow): def openProfile(self) -> None: name = self.pm.profiles()[self.profileForm.profiles.currentRow()] self.pm.load(name) - return def onOpenProfile(self, *, callback: Callable[[], None] | None = None) -> None: def on_done() -> None: @@ -451,7 +450,6 @@ class AnkiQt(QMainWindow): self.loadProfile() def onOpenBackup(self) -> None: - def do_open(path: str) -> None: if not askUser( tr.qt_misc_replace_your_collection_with_an_earlier2( @@ -677,7 +675,7 @@ class AnkiQt(QMainWindow): gui_hooks.collection_did_load(self.col) self.apply_collection_options() self.moveToState("deckBrowser") - except Exception as e: + except Exception: # dump error to stderr so it gets picked up by errors.py traceback.print_exc() @@ -774,7 +772,6 @@ class AnkiQt(QMainWindow): oldState = self.state cleanup = getattr(self, f"_{oldState}Cleanup", None) if cleanup: - # pylint: disable=not-callable cleanup(state) self.clearStateShortcuts() self.state = state @@ -821,7 +818,7 @@ class AnkiQt(QMainWindow): self.bottomWeb.hide_timer.start() def _reviewCleanup(self, newState: MainWindowState) -> None: - if newState != "resetRequired" and newState != "review": + 
if newState not in {"resetRequired", "review"}: self.reviewer.auto_advance_enabled = False self.reviewer.cleanup() self.toolbarWeb.elevate() @@ -1308,6 +1305,14 @@ title="{}" {}>{}""".format( def onPrefs(self) -> None: aqt.dialogs.open("Preferences", self) + def on_upgrade_downgrade(self) -> None: + if not askUser(tr.qt_misc_open_anki_launcher()): + return + + from aqt.package import update_and_restart + + update_and_restart() + def onNoteTypes(self) -> None: import aqt.models @@ -1389,6 +1394,8 @@ title="{}" {}>{}""".format( ########################################################################## def setupMenus(self) -> None: + from aqt.package import launcher_executable + m = self.form # File @@ -1405,6 +1412,7 @@ title="{}" {}>{}""".format( qconnect(m.actionDocumentation.triggered, self.onDocumentation) qconnect(m.actionDonate.triggered, self.onDonate) qconnect(m.actionAbout.triggered, self.onAbout) + m.actionAbout.setText(tr.qt_accel_about_mac()) # Edit qconnect(m.actionUndo.triggered, self.undo) @@ -1417,6 +1425,9 @@ title="{}" {}>{}""".format( qconnect(m.actionCreateFiltered.triggered, self.onCram) qconnect(m.actionEmptyCards.triggered, self.onEmptyCards) qconnect(m.actionNoteTypes.triggered, self.onNoteTypes) + qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade) + if not launcher_executable(): + m.action_upgrade_downgrade.setVisible(False) qconnect(m.actionPreferences.triggered, self.onPrefs) # View @@ -1708,11 +1719,37 @@ title="{}" {}>{}""".format( self.maybeHideAccelerators() self.hideStatusTips() elif is_win: - # make sure ctypes is bundled - from ctypes import windll, wintypes # type: ignore + self._setupWin32() - _dummy1 = windll - _dummy2 = wintypes + def _setupWin32(self): + """Fix taskbar display/pinning""" + if sys.platform != "win32": + return + + launcher_path = os.environ.get("ANKI_LAUNCHER") + if not launcher_path: + return + + from win32com.propsys import propsys, pscon + from win32com.propsys.propsys import PROPVARIANTType + + hwnd = int(self.winId()) + prop_store = propsys.SHGetPropertyStoreForWindow(hwnd) # type: ignore[call-arg] + prop_store.SetValue( + pscon.PKEY_AppUserModel_ID, PROPVARIANTType("Ankitects.Anki") + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchCommand, + PROPVARIANTType(f'"{launcher_path}"'), + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchDisplayNameResource, PROPVARIANTType("Anki") + ) + prop_store.SetValue( + pscon.PKEY_AppUserModel_RelaunchIconResource, + PROPVARIANTType(f"{launcher_path},0"), + ) + prop_store.Commit() def maybeHideAccelerators(self, tgt: Any | None = None) -> None: if not self.hideMenuAccels: diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index a38790728..f08be4cef 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -230,7 +230,11 @@ def _handle_local_file_request(request: LocalFileRequest) -> Response: else: max_age = 60 * 60 return flask.send_file( - fullpath, mimetype=mimetype, conditional=True, max_age=max_age, download_name="foo" # type: ignore[call-arg] + fullpath, + mimetype=mimetype, + conditional=True, + max_age=max_age, + download_name="foo", # type: ignore[call-arg] ) else: print(f"Not found: {path}") @@ -252,14 +256,8 @@ def _handle_local_file_request(request: LocalFileRequest) -> Response: def _builtin_data(path: str) -> bytes: """Return data from file in aqt/data folder. Path must use forward slash separators.""" - # packaged build? 
- if getattr(sys, "frozen", False): - reader = aqt.__loader__.get_resource_reader("_aqt") # type: ignore - with reader.open_resource(path) as f: - return f.read() - else: - full_path = aqt_data_path() / ".." / path - return full_path.read_bytes() + full_path = aqt_data_path() / ".." / path + return full_path.read_bytes() def _handle_builtin_file_request(request: BundledFileRequest) -> Response: @@ -653,7 +651,7 @@ exposed_backend_list = [ "compute_fsrs_params", "compute_optimal_retention", "set_wants_abort", - "evaluate_params", + "evaluate_params_legacy", "get_optimal_retention_parameters", "simulate_fsrs_review", # DeckConfigService diff --git a/qt/aqt/mpv.py b/qt/aqt/mpv.py index 74155814c..2586d024a 100644 --- a/qt/aqt/mpv.py +++ b/qt/aqt/mpv.py @@ -24,7 +24,7 @@ # # ------------------------------------------------------------------------------ -# pylint: disable=raise-missing-from + from __future__ import annotations import inspect @@ -66,7 +66,6 @@ class MPVTimeoutError(MPVError): if is_win: - # pylint: disable=import-error import pywintypes import win32file # pytype: disable=import-error import win32job @@ -138,15 +137,15 @@ class MPVBase: extended_info = win32job.QueryInformationJobObject( self._job, win32job.JobObjectExtendedLimitInformation ) - extended_info["BasicLimitInformation"][ - "LimitFlags" - ] = win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + extended_info["BasicLimitInformation"]["LimitFlags"] = ( + win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + ) win32job.SetInformationJobObject( self._job, win32job.JobObjectExtendedLimitInformation, extended_info, ) - handle = self._proc._handle # pylint: disable=no-member + handle = self._proc._handle win32job.AssignProcessToJobObject(self._job, handle) def _stop_process(self): @@ -177,7 +176,8 @@ class MPVBase: startup. """ start = time.time() - while self.is_running() and time.time() < start + 10: + timeout = 60 if is_mac else 10 + while self.is_running() and time.time() < start + timeout: time.sleep(0.1) if is_win: # named pipe @@ -192,7 +192,10 @@ class MPVBase: None, ) win32pipe.SetNamedPipeHandleState( - self._sock, 1, None, None # PIPE_NOWAIT + self._sock, + 1, + None, + None, # PIPE_NOWAIT ) except pywintypes.error as err: if err.args[0] == winerror.ERROR_FILE_NOT_FOUND: @@ -393,7 +396,7 @@ class MPVBase: return self._get_response(timeout) except MPVCommandError as e: raise MPVCommandError(f"{message['command']!r}: {e}") - except Exception as e: + except Exception: if _retry: print("mpv timed out, restarting") self._stop_process() @@ -500,7 +503,6 @@ class MPV(MPVBase): # Simulate an init event when the process and all callbacks have been # completely set up. 
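The mpv startup change above bumps the wait for the player's IPC endpoint to 60 seconds on macOS (10 elsewhere). Reduced to a stand-alone sketch, the timeout-bounded polling looks like this; the real loop interleaves the platform-specific socket/named-pipe connection attempts:

```python
# Simplified sketch of the startup wait in mpv.py; is_running/try_connect
# stand in for the real process check and IPC connection attempt.
import sys
import time


def wait_for_ipc(is_running, try_connect) -> bool:
    timeout = 60 if sys.platform == "darwin" else 10
    start = time.time()
    while is_running() and time.time() < start + timeout:
        if try_connect():
            return True
        time.sleep(0.1)
    return False
```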
if hasattr(self, "on_init"): - # pylint: disable=no-member self.on_init() # diff --git a/qt/aqt/overview.py b/qt/aqt/overview.py index 184a51cf5..b1fc9a119 100644 --- a/qt/aqt/overview.py +++ b/qt/aqt/overview.py @@ -113,7 +113,7 @@ class Overview: self.mw.moveToState("deckBrowser") elif url == "review": openLink(f"{aqt.appShared}info/{self.sid}?v={self.sidVer}") - elif url == "studymore" or url == "customStudy": + elif url in {"studymore", "customStudy"}: self.onStudyMore() elif url == "unbury": self.on_unbury() @@ -180,7 +180,6 @@ class Overview: ############################################################ def _renderPage(self) -> None: - but = self.mw.button deck = self.mw.col.decks.current() self.sid = deck.get("sharedFrom") if self.sid: @@ -307,9 +306,7 @@ class Overview: if b[0]: b[0] = tr.actions_shortcut_key(val=shortcut(b[0])) buf += """ -""" % tuple( - b - ) +""" % tuple(b) self.bottom.draw( buf=buf, link_handler=link_handler, diff --git a/qt/aqt/package.py b/qt/aqt/package.py index f1ee8cd79..f85a17335 100644 --- a/qt/aqt/package.py +++ b/qt/aqt/package.py @@ -5,93 +5,164 @@ from __future__ import annotations +import contextlib import os +import subprocess import sys from pathlib import Path - -def _fix_pywin32() -> None: - # extend sys.path with .pth files - import site - - site.addsitedir(sys.path[0]) - - # use updated sys.path to locate dll folder and add it to path - path = sys.path[-1] - path = path.replace("Pythonwin", "pywin32_system32") - os.environ["PATH"] += ";" + path - - # import Python modules from .dll files - import importlib.machinery - - for name in "pythoncom", "pywintypes": - filename = os.path.join(path, name + "39.dll") - loader = importlib.machinery.ExtensionFileLoader(name, filename) - spec = importlib.machinery.ModuleSpec(name=name, loader=loader, origin=filename) - _mod = importlib._bootstrap._load(spec) # type: ignore +from anki.utils import is_mac, is_win -def _patch_pkgutil() -> None: - """Teach pkgutil.get_data() how to read files from in-memory resources. +# ruff: noqa: F401 +def first_run_setup() -> None: + """Code run the first time after install/upgrade. - This is required for jsonschema.""" - import importlib - import pkgutil + Currently, we just import our main libraries and invoke + mpv/lame on macOS, which is slow on the first run, and doing + it this way shows progress being made. + """ - def get_data_custom(package: str, resource: str) -> bytes | None: - try: - module = importlib.import_module(package) - reader = module.__loader__.get_resource_reader(package) # type: ignore - with reader.open_resource(resource) as f: - return f.read() - except Exception: - return None - - pkgutil.get_data = get_data_custom - - -def _patch_certifi() -> None: - """Tell certifi (and thus requests) to use a file in our package folder. 
- - By default it creates a copy of the data in a temporary folder, which then gets - cleaned up by macOS's temp file cleaner.""" - import certifi - - def where() -> str: - prefix = Path(sys.prefix) - if sys.platform == "darwin": - path = prefix / "../Resources/certifi/cacert.pem" - else: - path = prefix / "lib" / "certifi" / "cacert.pem" - return str(path) - - certifi.where = where - - -def _fix_protobuf_path() -> None: - sys.path.append(str(Path(sys.prefix) / "../Resources")) - - -def packaged_build_setup() -> None: - if not getattr(sys, "frozen", False): + if not is_mac: return - print("Initial setup...") + # Import anki_audio first and spawn commands + import anki_audio - if sys.platform == "win32": - _fix_pywin32() - elif sys.platform == "darwin": - _fix_protobuf_path() + audio_pkg_path = Path(anki_audio.__file__).parent - _patch_pkgutil() - _patch_certifi() + # Start mpv and lame commands concurrently + processes = [] + for cmd_name in ["mpv", "lame"]: + cmd_path = audio_pkg_path / cmd_name + proc = subprocess.Popen( + [str(cmd_path), "--version"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + processes.append(proc) - # escape hatch for debugging issues with packaged build startup - if os.getenv("ANKI_STARTUP_REPL"): - # mypy incorrectly thinks this does not exist on Windows - is_tty = os.isatty(sys.stdin.fileno()) # type: ignore - if is_tty: - import code + # Continue with other imports while commands run + import concurrent.futures - code.InteractiveConsole().interact() - sys.exit(0) + import bs4 + import flask + import flask_cors + import markdown + import PyQt6.QtCore + import PyQt6.QtGui + import PyQt6.QtNetwork + import PyQt6.QtQuick + import PyQt6.QtWebChannel + import PyQt6.QtWebEngineCore + import PyQt6.QtWebEngineWidgets + import PyQt6.QtWidgets + import PyQt6.sip + import requests + import waitress + + import anki.collection + + from . import _macos_helper + + # Wait for both commands to complete + for proc in processes: + proc.wait() + + +def uv_binary() -> str | None: + """Return the path to the uv binary.""" + return os.environ.get("ANKI_LAUNCHER_UV") + + +def launcher_root() -> str | None: + """Return the path to the launcher root directory (AnkiProgramFiles).""" + return os.environ.get("UV_PROJECT") + + +def venv_binary(cmd: str) -> str | None: + """Return the path to a binary in the launcher's venv.""" + root = launcher_root() + if not root: + return None + + root_path = Path(root) + if is_win: + binary_path = root_path / ".venv" / "Scripts" / cmd + else: + binary_path = root_path / ".venv" / "bin" / cmd + + return str(binary_path) + + +def add_python_requirements(reqs: list[str]) -> tuple[bool, str]: + """Add Python requirements to the launcher venv using uv add. 
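The new helpers in package.py locate the launcher's uv-managed virtualenv via environment variables set by the launcher (ANKI_LAUNCHER_UV, UV_PROJECT). venv_binary() just joins the expected bin/Scripts path and returns None outside a launcher install; a short usage sketch, with "python" as an example binary name:

```python
# Usage sketch; venv_binary() returns None when UV_PROJECT is unset
# (i.e. not a launcher install).
import subprocess

from aqt.package import venv_binary

python = venv_binary("python")
if python is None:
    print("not running from a launcher install")
else:
    subprocess.run([python, "--version"], check=False)
```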
+ + Returns (success, output)""" + + binary = uv_binary() + if not binary: + return (False, "Not in packaged build.") + + uv_cmd = [binary, "add"] + reqs + result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False) + + if result.returncode == 0: + root = launcher_root() + if root: + sync_marker = Path(root) / ".sync_complete" + sync_marker.touch() + + return (True, result.stdout) + else: + return (False, result.stderr) + + +def launcher_executable() -> str | None: + """Return the path to the Anki launcher executable.""" + return os.getenv("ANKI_LAUNCHER") + + +def trigger_launcher_run() -> None: + """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" + try: + root = launcher_root() + if not root: + return + + pyproject_path = Path(root) / "pyproject.toml" + + if pyproject_path.exists(): + # Touch the file to update its mtime + pyproject_path.touch() + except Exception as e: + print(e) + + +def update_and_restart() -> None: + """Update and restart Anki using the launcher.""" + from aqt import mw + + launcher = launcher_executable() + assert launcher + + trigger_launcher_run() + + with contextlib.suppress(ResourceWarning): + env = os.environ.copy() + creationflags = 0 + if sys.platform == "win32": + creationflags = ( + subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS + ) + subprocess.Popen( + [launcher], + start_new_session=True, + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + creationflags=creationflags, + ) + + mw.app.quit() diff --git a/qt/aqt/profiles.py b/qt/aqt/profiles.py index 273e6df3a..919be170c 100644 --- a/qt/aqt/profiles.py +++ b/qt/aqt/profiles.py @@ -128,7 +128,7 @@ class ProfileManager: default_answer_keys = {ease_num: str(ease_num) for ease_num in range(1, 5)} last_run_version: int = 0 - def __init__(self, base: Path) -> None: # + def __init__(self, base: Path) -> None: "base should be retrieved via ProfileMangager.get_created_base_folder" ## Settings which should be forgotten each Anki restart self.session: dict[str, Any] = {} @@ -153,7 +153,7 @@ class ProfileManager: else: try: self.load(profile) - except Exception as exc: + except Exception: self.invalid_profile_provided_on_commandline = True # Profile load/save @@ -189,11 +189,8 @@ class ProfileManager: # return the bytes directly return args[0] elif name == "_unpickle_enum": - if qtmajor == 5: - return sip._unpickle_enum(module, klass, args) # type: ignore - else: - # old style enums can't be unpickled - return None + # old style enums can't be unpickled + return None else: return sip._unpickle_type(module, klass, args) # type: ignore @@ -486,7 +483,11 @@ create table if not exists profiles code = obj[1] name = obj[0] r = QMessageBox.question( - None, "Anki", tr.profiles_confirm_lang_choice(lang=name), QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, QMessageBox.StandardButton.No # type: ignore + None, + "Anki", + tr.profiles_confirm_lang_choice(lang=name), + QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, + QMessageBox.StandardButton.No, # type: ignore ) if r != QMessageBox.StandardButton.Yes: return self.setDefaultLang(f.lang.currentRow()) diff --git a/qt/aqt/progress.py b/qt/aqt/progress.py index fbb0a7470..cc7e750de 100644 --- a/qt/aqt/progress.py +++ b/qt/aqt/progress.py @@ -119,13 +119,12 @@ class ProgressManager: if not self._levels: # no current progress; safe to fire func() + elif repeat: + # skip this time; we'll fire again + pass else: - if repeat: 
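add_python_requirements() shells out to `uv add` inside the launcher project and, on success, touches .sync_complete so the launcher knows the venv matches pyproject.toml. A hedged usage sketch; the requirement string is an arbitrary example:

```python
from aqt.package import add_python_requirements

ok, output = add_python_requirements(["rich>=13"])  # example requirement only
if ok:
    print("added to the launcher venv:", output)
else:
    print("uv add failed, or not running a packaged build:", output)
```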
- # skip this time; we'll fire again - pass - else: - # retry in 100ms - self.single_shot(100, func, requires_collection) + # retry in 100ms + self.single_shot(100, func, requires_collection) return handler @@ -300,8 +299,7 @@ class ProgressManager: def _closeWin(self) -> None: # if the parent window has been deleted, the progress dialog may have # already been dropped; delete it if it hasn't been - if not sip.isdeleted(self._win): - assert self._win is not None + if self._win and not sip.isdeleted(self._win): self._win.cancel() self._win = None self._shown = 0 diff --git a/qt/aqt/qt/__init__.py b/qt/aqt/qt/__init__.py index ea1b4bd46..730bc771b 100644 --- a/qt/aqt/qt/__init__.py +++ b/qt/aqt/qt/__init__.py @@ -2,7 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html # make sure not to optimize imports on this file -# pylint: disable=unused-import +# ruff: noqa: F401 from __future__ import annotations import os @@ -11,27 +11,19 @@ import traceback from collections.abc import Callable from typing import TypeVar, Union -try: - import PyQt6 -except Exception: - from .qt5 import * # type: ignore -else: - if os.getenv("ENABLE_QT5_COMPAT"): - print("Running with temporary Qt5 compatibility shims.") - from . import qt5_compat # needs to be imported first - from .qt6 import * +from anki._legacy import deprecated +# legacy code depends on these re-exports from anki.utils import is_mac, is_win -# fix buggy ubuntu12.04 display of language selector -os.environ["LIBOVERLAY_SCROLLBAR"] = "0" +from .qt6 import * def debug() -> None: from pdb import set_trace pyqtRemoveInputHook() - set_trace() # pylint: disable=forgotten-debug-statement + set_trace() if os.environ.get("DEBUG"): @@ -52,7 +44,7 @@ qtminor = _version.minorVersion() qtpoint = _version.microVersion() qtfullversion = _version.segments() -if qtmajor < 5 or (qtmajor == 5 and qtminor < 14): +if qtmajor == 6 and qtminor < 2: raise Exception("Anki does not support your Qt version.") @@ -64,11 +56,6 @@ def qconnect(signal: Callable | pyqtSignal | pyqtBoundSignal, func: Callable) -> _T = TypeVar("_T") +@deprecated(info="no longer required, and now a no-op") def without_qt5_compat_wrapper(cls: _T) -> _T: - """Remove Qt5 compat wrapper from Qt class, if active. 
- - Only needed for a few Qt APIs that deal with QVariants.""" - if fn := getattr(cls, "_without_compat_wrapper", None): - return fn() - else: - return cls + return cls diff --git a/qt/aqt/qt/qt5.py b/qt/aqt/qt/qt5.py deleted file mode 100644 index 0a45dffb9..000000000 --- a/qt/aqt/qt/qt5.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -# make sure not to optimize imports on this file -# pylint: skip-file - -""" -PyQt5 imports -""" - -from PyQt5.QtCore import * # type: ignore -from PyQt5.QtGui import * # type: ignore -from PyQt5.QtNetwork import QLocalServer, QLocalSocket, QNetworkProxy # type: ignore -from PyQt5.QtWebChannel import QWebChannel # type: ignore -from PyQt5.QtWebEngineCore import * # type: ignore -from PyQt5.QtWebEngineWidgets import * # type: ignore -from PyQt5.QtWidgets import * # type: ignore - -try: - from PyQt5 import sip # type: ignore -except ImportError: - import sip # type: ignore diff --git a/qt/aqt/qt/qt5_audio.py b/qt/aqt/qt/qt5_audio.py deleted file mode 100644 index cc8426a6e..000000000 --- a/qt/aqt/qt/qt5_audio.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -# pylint: skip-file - -""" -PyQt5-only audio code -""" - -import wave -from collections.abc import Callable -from concurrent.futures import Future -from typing import cast - -import aqt - -from . import * # isort:skip -from ..sound import Recorder # isort:skip -from ..utils import showWarning # isort:skip - - -class QtAudioInputRecorder(Recorder): - def __init__(self, output_path: str, mw: aqt.AnkiQt, parent: QWidget) -> None: - super().__init__(output_path) - - self.mw = mw - self._parent = parent - - from PyQt5.QtMultimedia import ( # type: ignore - QAudioDeviceInfo, - QAudioFormat, - QAudioInput, - ) - - format = QAudioFormat() - format.setChannelCount(1) - format.setSampleRate(44100) - format.setSampleSize(16) - format.setCodec("audio/pcm") - format.setByteOrder(QAudioFormat.LittleEndian) - format.setSampleType(QAudioFormat.SignedInt) - - device = QAudioDeviceInfo.defaultInputDevice() - if not device.isFormatSupported(format): - format = device.nearestFormat(format) - print("format changed") - print("channels", format.channelCount()) - print("rate", format.sampleRate()) - print("size", format.sampleSize()) - self._format = format - - self._audio_input = QAudioInput(device, format, parent) - - def start(self, on_done: Callable[[], None]) -> None: - self._iodevice = self._audio_input.start() - self._buffer = bytearray() - qconnect(self._iodevice.readyRead, self._on_read_ready) - super().start(on_done) - - def _on_read_ready(self) -> None: - self._buffer.extend(cast(bytes, self._iodevice.readAll())) - - def stop(self, on_done: Callable[[str], None]) -> None: - def on_stop_timer() -> None: - # read anything remaining in buffer & stop - self._on_read_ready() - self._audio_input.stop() - - if err := self._audio_input.error(): - showWarning(f"recording failed: {err}") - return - - def write_file() -> None: - # swallow the first 300ms to allow audio device to quiesce - wait = int(44100 * self.STARTUP_DELAY) - if len(self._buffer) <= wait: - return - self._buffer = self._buffer[wait:] - - # write out the wave file - wf = wave.open(self.output_path, "wb") - wf.setnchannels(self._format.channelCount()) - wf.setsampwidth(self._format.sampleSize() // 8) - 
wf.setframerate(self._format.sampleRate()) - wf.writeframes(self._buffer) - wf.close() - - def and_then(fut: Future) -> None: - fut.result() - Recorder.stop(self, on_done) - - self.mw.taskman.run_in_background(write_file, and_then) - - # schedule the stop for half a second in the future, - # to avoid truncating the end of the recording - self._stop_timer = t = QTimer(self._parent) - t.timeout.connect(on_stop_timer) # type: ignore - t.setSingleShot(True) - t.start(500) diff --git a/qt/aqt/qt/qt5_compat.py b/qt/aqt/qt/qt5_compat.py deleted file mode 100644 index ef281b87c..000000000 --- a/qt/aqt/qt/qt5_compat.py +++ /dev/null @@ -1,411 +0,0 @@ -# Copyright: Ankitects Pty Ltd and contributors -# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -# type: ignore -# pylint: disable=unused-import - -""" -Patches and aliases that provide a PyQt5 → PyQt6 compatibility shim for add-ons -""" - -import sys -import types -import typing - -import PyQt6.QtCore -import PyQt6.QtDBus -import PyQt6.QtGui -import PyQt6.QtNetwork -import PyQt6.QtPrintSupport -import PyQt6.QtWebChannel -import PyQt6.QtWebEngineCore -import PyQt6.QtWebEngineWidgets -import PyQt6.QtWidgets - -from anki._legacy import print_deprecation_warning - -# Globally alias PyQt5 to PyQt6 -# ######################################################################### - -sys.modules["PyQt5"] = PyQt6 -# Need to alias QtCore explicitly as sip otherwise complains about repeat registration -sys.modules["PyQt5.QtCore"] = PyQt6.QtCore -# Need to alias QtWidgets and QtGui explicitly to facilitate patches -sys.modules["PyQt5.QtGui"] = PyQt6.QtGui -sys.modules["PyQt5.QtWidgets"] = PyQt6.QtWidgets -# Needed to maintain import order between QtWebEngineWidgets and QCoreApplication: -sys.modules["PyQt5.QtWebEngineWidgets"] = PyQt6.QtWebEngineWidgets -# Register other aliased top-level Qt modules just to be safe: -sys.modules["PyQt5.QtWebEngineCore"] = PyQt6.QtWebEngineCore -sys.modules["PyQt5.QtWebChannel"] = PyQt6.QtWebChannel -sys.modules["PyQt5.QtNetwork"] = PyQt6.QtNetwork -# Alias sip -sys.modules["sip"] = PyQt6.sip - -# Restore QWebEnginePage.view() -# ######################################################################## - -from PyQt6.QtWebEngineCore import QWebEnginePage -from PyQt6.QtWebEngineWidgets import QWebEngineView - - -def qwebenginepage_view(page: QWebEnginePage) -> QWebEnginePage: - print_deprecation_warning( - "'QWebEnginePage.view()' is deprecated. " - "Please use 'QWebEngineView.forPage(page)'" - ) - return QWebEngineView.forPage(page) - - -PyQt6.QtWebEngineCore.QWebEnginePage.view = qwebenginepage_view - -# Alias removed exec_ methods to exec -# ######################################################################## - -from PyQt6.QtCore import QCoreApplication, QEventLoop, QThread -from PyQt6.QtGui import QDrag, QGuiApplication -from PyQt6.QtWidgets import QApplication, QDialog, QMenu - - -# This helper function is needed as aliasing exec_ to exec directly will cause -# an unbound method error, even when wrapped with types.MethodType -def qt_exec_(object, *args, **kwargs): - class_name = object.__class__.__name__ - print_deprecation_warning( - f"'{class_name}.exec_()' is deprecated. 
Please use '{class_name}.exec()'" - ) - return object.exec(*args, **kwargs) - - -QCoreApplication.exec_ = qt_exec_ -QEventLoop.exec_ = qt_exec_ -QThread.exec_ = qt_exec_ -QDrag.exec_ = qt_exec_ -QGuiApplication.exec_ = qt_exec_ -QApplication.exec_ = qt_exec_ -QDialog.exec_ = qt_exec_ -QMenu.exec_ = qt_exec_ - -# Graciously handle removed Qt resource system -# ######################################################################## - -# Given that add-ons mostly use the Qt resource system to equip UI elements with -# icons – which oftentimes are not essential to the core UX –, printing a warning -# instead of preventing the add-on from loading seems appropriate. - - -def qt_resource_system_call(*args, **kwargs): - print_deprecation_warning( - "The Qt resource system no longer works on PyQt6. " - "Use QDir.addSearchPath() or mw.addonManager.setWebExports() instead." - ) - - -PyQt6.QtCore.qRegisterResourceData = qt_resource_system_call -PyQt6.QtCore.qUnregisterResourceData = qt_resource_system_call - -# Patch unscoped enums back in, aliasing them to scoped enums -# ######################################################################## - -PyQt6.QtWidgets.QDockWidget.AllDockWidgetFeatures = ( - PyQt6.QtWidgets.QDockWidget.DockWidgetFeature.DockWidgetClosable - | PyQt6.QtWidgets.QDockWidget.DockWidgetFeature.DockWidgetMovable - | PyQt6.QtWidgets.QDockWidget.DockWidgetFeature.DockWidgetFloatable -) - -# when we subclass QIcon, icons fail to show when returned by getData() -# in a tableview/treeview, so we need to manually alias these -PyQt6.QtGui.QIcon.Active = PyQt6.QtGui.QIcon.Mode.Active -PyQt6.QtGui.QIcon.Disabled = PyQt6.QtGui.QIcon.Mode.Disabled -PyQt6.QtGui.QIcon.Normal = PyQt6.QtGui.QIcon.Mode.Normal -PyQt6.QtGui.QIcon.Selected = PyQt6.QtGui.QIcon.Mode.Selected -PyQt6.QtGui.QIcon.Off = PyQt6.QtGui.QIcon.State.Off -PyQt6.QtGui.QIcon.On = PyQt6.QtGui.QIcon.State.On - -# This is the subset of enums used in all public Anki add-ons as of 2021-10-19. -# Please note that this list is likely to be incomplete as the process used to -# find them probably missed dynamically constructed enums. -# Also, as mostly only public Anki add-ons were taken into consideration, -# some enums in other add-ons might not be included. In those cases please -# consider filing a PR to extend the assignments below. - -# Important: These patches are not meant to provide compatibility for all -# add-ons going forward, but simply to maintain support with already -# existing add-ons. Add-on authors should take heed to use scoped enums -# in any future code changes. 
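With the compatibility shim above deleted, add-ons must spell PyQt6 enums with their scoped names; the shim's enum map only aliased the old unscoped lookups. The migration is mechanical, for example:

```python
# Scoped-enum spellings that work without the removed qt5_compat shim.
from aqt.qt import QMessageBox, Qt

alignment = Qt.AlignmentFlag.AlignCenter  # was Qt.AlignCenter
buttons = QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No
role = QMessageBox.ButtonRole.ActionRole  # was QMessageBox.ActionRole
```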
- -# (module, [(type_name, enums)]) -_enum_map = ( - ( - PyQt6.QtCore, - [ - ("QEvent", ("Type",)), - ("QEventLoop", ("ProcessEventsFlag",)), - ("QIODevice", ("OpenModeFlag",)), - ("QItemSelectionModel", ("SelectionFlag",)), - ("QLocale", ("Country", "Language")), - ("QMetaType", ("Type",)), - ("QProcess", ("ProcessState", "ProcessChannel")), - ("QStandardPaths", ("StandardLocation",)), - ( - "Qt", - ( - "AlignmentFlag", - "ApplicationAttribute", - "ArrowType", - "AspectRatioMode", - "BrushStyle", - "CaseSensitivity", - "CheckState", - "ConnectionType", - "ContextMenuPolicy", - "CursorShape", - "DateFormat", - "DayOfWeek", - "DockWidgetArea", - "FindChildOption", - "FocusPolicy", - "FocusReason", - "GlobalColor", - "HighDpiScaleFactorRoundingPolicy", - "ImageConversionFlag", - "InputMethodHint", - "ItemDataRole", - "ItemFlag", - "KeyboardModifier", - "LayoutDirection", - "MatchFlag", - "Modifier", - "MouseButton", - "Orientation", - "PenCapStyle", - "PenJoinStyle", - "PenStyle", - "ScrollBarPolicy", - "ShortcutContext", - "SortOrder", - "TextElideMode", - "TextFlag", - "TextFormat", - "TextInteractionFlag", - "ToolBarArea", - "ToolButtonStyle", - "TransformationMode", - "WidgetAttribute", - "WindowModality", - "WindowState", - "WindowType", - "Key", - ), - ), - ("QThread", ("Priority",)), - ], - ), - (PyQt6.QtDBus, [("QDBus", ("CallMode",))]), - ( - PyQt6.QtGui, - [ - ("QAction", ("MenuRole", "ActionEvent")), - ("QClipboard", ("Mode",)), - ("QColor", ("NameFormat",)), - ("QFont", ("Style", "Weight", "StyleHint")), - ("QFontDatabase", ("WritingSystem", "SystemFont")), - ("QImage", ("Format",)), - ("QKeySequence", ("SequenceFormat", "StandardKey")), - ("QMovie", ("CacheMode",)), - ("QPageLayout", ("Orientation",)), - ("QPageSize", ("PageSizeId",)), - ("QPainter", ("RenderHint",)), - ("QPalette", ("ColorRole", "ColorGroup")), - ("QTextCharFormat", ("UnderlineStyle",)), - ("QTextCursor", ("MoveOperation", "MoveMode", "SelectionType")), - ("QTextFormat", ("Property",)), - ("QTextOption", ("WrapMode",)), - ("QValidator", ("State",)), - ], - ), - (PyQt6.QtNetwork, [("QHostAddress", ("SpecialAddress",))]), - (PyQt6.QtPrintSupport, [("QPrinter", ("Unit",))]), - ( - PyQt6.QtWebEngineCore, - [ - ("QWebEnginePage", ("WebWindowType", "FindFlag", "WebAction")), - ("QWebEngineProfile", ("PersistentCookiesPolicy", "HttpCacheType")), - ("QWebEngineScript", ("ScriptWorldId", "InjectionPoint")), - ("QWebEngineSettings", ("FontSize", "WebAttribute")), - ], - ), - ( - PyQt6.QtWidgets, - [ - ( - "QAbstractItemView", - ( - "CursorAction", - "DropIndicatorPosition", - "ScrollMode", - "EditTrigger", - "SelectionMode", - "SelectionBehavior", - "DragDropMode", - "ScrollHint", - ), - ), - ("QAbstractScrollArea", ("SizeAdjustPolicy",)), - ("QAbstractSpinBox", ("ButtonSymbols",)), - ("QBoxLayout", ("Direction",)), - ("QColorDialog", ("ColorDialogOption",)), - ("QComboBox", ("SizeAdjustPolicy", "InsertPolicy")), - ("QCompleter", ("CompletionMode",)), - ("QDateTimeEdit", ("Section",)), - ("QDialog", ("DialogCode",)), - ("QDialogButtonBox", ("StandardButton", "ButtonRole")), - ("QDockWidget", ("DockWidgetFeature",)), - ("QFileDialog", ("Option", "FileMode", "AcceptMode", "DialogLabel")), - ("QFormLayout", ("FieldGrowthPolicy", "ItemRole")), - ("QFrame", ("Shape", "Shadow")), - ("QGraphicsItem", ("GraphicsItemFlag",)), - ("QGraphicsPixmapItem", ("ShapeMode",)), - ("QGraphicsView", ("ViewportAnchor", "DragMode")), - ("QHeaderView", ("ResizeMode",)), - ("QLayout", ("SizeConstraint",)), - ("QLineEdit", ("EchoMode",)), - ( - 
"QListView", - ("Flow", "BrowserLayout", "ResizeMode", "Movement", "ViewMode"), - ), - ("QListWidgetItem", ("ItemType",)), - ("QMessageBox", ("StandardButton", "Icon", "ButtonRole")), - ("QPlainTextEdit", ("LineWrapMode",)), - ("QProgressBar", ("Direction",)), - ("QRubberBand", ("Shape",)), - ("QSizePolicy", ("ControlType", "Policy")), - ("QSlider", ("TickPosition",)), - ( - "QStyle", - ( - "SubElement", - "ComplexControl", - "StandardPixmap", - "ControlElement", - "PixelMetric", - "StateFlag", - "SubControl", - ), - ), - ("QSystemTrayIcon", ("MessageIcon", "ActivationReason")), - ("QTabBar", ("ButtonPosition",)), - ("QTabWidget", ("TabShape", "TabPosition")), - ("QTextEdit", ("LineWrapMode",)), - ("QToolButton", ("ToolButtonPopupMode",)), - ("QWizard", ("WizardStyle", "WizardOption")), - ], - ), -) - -_renamed_enum_cases = { - "QComboBox": { - "AdjustToMinimumContentsLength": "AdjustToMinimumContentsLengthWithIcon" - }, - "QDialogButtonBox": {"No": "NoButton"}, - "QPainter": {"HighQualityAntialiasing": "Antialiasing"}, - "QPalette": {"Background": "Window", "Foreground": "WindowText"}, - "Qt": {"MatchRegExp": "MatchRegularExpression", "MidButton": "MiddleButton"}, -} - - -# This works by wrapping each enum-containing Qt class (eg QAction) in a proxy. -# When an attribute is missing from the underlying Qt class, __getattr__ is -# called, and we try fetching the attribute from each of the declared enums -# for that module. If a match is found, a deprecation warning is printed. -# -# Looping through enumerations is not particularly efficient on a large type like -# Qt, but we only pay the cost when an attribute is not found. In the worst case, -# it's about 50ms per 1000 failed lookups on the Qt module. - - -def _instrument_type( - module: types.ModuleType, type_name: str, enums: list[str] -) -> None: - type = getattr(module, type_name) - renamed_attrs = _renamed_enum_cases.get(type_name, {}) - - class QtClassProxyType(type.__class__): - def __getattr__(cls, provided_name): # pylint: disable=no-self-argument - # we know this is not an enum - if provided_name == "__pyqtSignature__": - raise AttributeError - - name = renamed_attrs.get(provided_name) or provided_name - - for enum_name in enums: - enum = getattr(type, enum_name) - try: - val = getattr(enum, name) - except AttributeError: - continue - - print_deprecation_warning( - f"'{type_name}.{provided_name}' will stop working. Please use '{type_name}.{enum_name}.{name}' instead." 
- ) - return val - - return getattr(type, name) - - class QtClassProxy( - type, metaclass=QtClassProxyType - ): # pylint: disable=invalid-metaclass - @staticmethod - def _without_compat_wrapper(): - return type - - setattr(module, type_name, QtClassProxy) - - -for module, type_to_enum_list in _enum_map: - for type_name, enums in type_to_enum_list: - _instrument_type(module, type_name, enums) - -# Alias classes shifted between QtWidgets and QtGui -########################################################################## - -PyQt6.QtWidgets.QAction = PyQt6.QtGui.QAction -PyQt6.QtWidgets.QActionGroup = PyQt6.QtGui.QActionGroup -PyQt6.QtWidgets.QShortcut = PyQt6.QtGui.QShortcut - -# Alias classes shifted between QtWebEngineWidgets and QtWebEngineCore -########################################################################## - -PyQt6.QtWebEngineWidgets.QWebEnginePage = PyQt6.QtWebEngineCore.QWebEnginePage -PyQt6.QtWebEngineWidgets.QWebEngineHistory = PyQt6.QtWebEngineCore.QWebEngineHistory -PyQt6.QtWebEngineWidgets.QWebEngineProfile = PyQt6.QtWebEngineCore.QWebEngineProfile -PyQt6.QtWebEngineWidgets.QWebEngineScript = PyQt6.QtWebEngineCore.QWebEngineScript -PyQt6.QtWebEngineWidgets.QWebEngineScriptCollection = ( - PyQt6.QtWebEngineCore.QWebEngineScriptCollection -) -PyQt6.QtWebEngineWidgets.QWebEngineClientCertificateSelection = ( - PyQt6.QtWebEngineCore.QWebEngineClientCertificateSelection -) -PyQt6.QtWebEngineWidgets.QWebEngineSettings = PyQt6.QtWebEngineCore.QWebEngineSettings -PyQt6.QtWebEngineWidgets.QWebEngineFullScreenRequest = ( - PyQt6.QtWebEngineCore.QWebEngineFullScreenRequest -) -PyQt6.QtWebEngineWidgets.QWebEngineContextMenuData = ( - PyQt6.QtWebEngineCore.QWebEngineContextMenuRequest -) -PyQt6.QtWebEngineWidgets.QWebEngineDownloadItem = ( - PyQt6.QtWebEngineCore.QWebEngineDownloadRequest -) - -# Aliases for other miscellaneous class changes -########################################################################## - -PyQt6.QtCore.QRegExp = PyQt6.QtCore.QRegularExpression - - -# Mock the removed PyQt5.Qt module -########################################################################## - -sys.modules["PyQt5.Qt"] = sys.modules["aqt.qt"] -# support 'from PyQt5 import Qt', as it's an alias to PyQt6 -PyQt6.Qt = sys.modules["aqt.qt"] diff --git a/qt/aqt/qt/qt6.py b/qt/aqt/qt/qt6.py index df79d6b1a..dabed757b 100644 --- a/qt/aqt/qt/qt6.py +++ b/qt/aqt/qt/qt6.py @@ -2,8 +2,7 @@ # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html # make sure not to optimize imports on this file -# pylint: disable=unused-import - +# ruff: noqa: F401 """ PyQt6 imports """ @@ -12,7 +11,7 @@ from PyQt6 import sip from PyQt6.QtCore import * # conflicting Qt and qFuzzyCompare definitions require an ignore -from PyQt6.QtGui import * # type: ignore[misc,assignment] +from PyQt6.QtGui import * # type: ignore[no-redef,assignment] from PyQt6.QtNetwork import QLocalServer, QLocalSocket, QNetworkProxy from PyQt6.QtQuick import * from PyQt6.QtWebChannel import QWebChannel diff --git a/qt/aqt/reviewer.py b/qt/aqt/reviewer.py index 05e9becf4..a8839c598 100644 --- a/qt/aqt/reviewer.py +++ b/qt/aqt/reviewer.py @@ -18,13 +18,14 @@ import aqt.operations from anki.cards import Card, CardId from anki.collection import Config, OpChanges, OpChangesWithCount from anki.scheduler.base import ScheduleCardsAsNew -from anki.scheduler.v3 import CardAnswer, QueuedCards -from anki.scheduler.v3 import Scheduler as V3Scheduler from anki.scheduler.v3 import ( + CardAnswer, + QueuedCards, SchedulingContext, 
SchedulingStates, SetSchedulingStatesRequest, ) +from anki.scheduler.v3 import Scheduler as V3Scheduler from anki.tags import MARKED_TAG from anki.types import assert_exhaustive from anki.utils import is_mac @@ -594,10 +595,9 @@ class Reviewer: def _shortcutKeys( self, ) -> Sequence[tuple[str, Callable] | tuple[Qt.Key, Callable]]: - - def generate_default_answer_keys() -> ( - Generator[tuple[str, partial], None, None] - ): + def generate_default_answer_keys() -> Generator[ + tuple[str, partial], None, None + ]: for ease in aqt.mw.pm.default_answer_keys: key = aqt.mw.pm.get_answer_key(ease) if not key: diff --git a/qt/aqt/sound.py b/qt/aqt/sound.py index 386767a30..d20365232 100644 --- a/qt/aqt/sound.py +++ b/qt/aqt/sound.py @@ -101,7 +101,7 @@ def is_audio_file(fname: str) -> bool: return ext in AUDIO_EXTENSIONS -class SoundOrVideoPlayer(Player): # pylint: disable=abstract-method +class SoundOrVideoPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -111,7 +111,7 @@ class SoundOrVideoPlayer(Player): # pylint: disable=abstract-method return None -class SoundPlayer(Player): # pylint: disable=abstract-method +class SoundPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -121,7 +121,7 @@ class SoundPlayer(Player): # pylint: disable=abstract-method return None -class VideoPlayer(Player): # pylint: disable=abstract-method +class VideoPlayer(Player): default_rank = 0 def rank_for_tag(self, tag: AVTag) -> int | None: @@ -279,12 +279,25 @@ def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]: if "LD_LIBRARY_PATH" in env and "SNAP" not in env: del env["LD_LIBRARY_PATH"] - if is_win: - packaged_path = Path(sys.prefix) / (cmd[0] + ".exe") - elif is_mac: - packaged_path = Path(sys.prefix) / ".." / "Resources" / cmd[0] - else: - packaged_path = Path(sys.prefix) / cmd[0] + # Try to find binary in anki-audio package for Windows/Mac + if is_win or is_mac: + try: + import anki_audio + + audio_pkg_path = Path(anki_audio.__file__).parent + if is_win: + packaged_path = audio_pkg_path / (cmd[0] + ".exe") + else: # is_mac + packaged_path = audio_pkg_path / cmd[0] + + if packaged_path.exists(): + cmd[0] = str(packaged_path) + return cmd, env + except ImportError: + # anki-audio not available, fall back to old behavior + pass + + packaged_path = Path(sys.prefix) / cmd[0] if packaged_path.exists(): cmd[0] = str(packaged_path) @@ -311,7 +324,7 @@ def retryWait(proc: subprocess.Popen) -> int: ########################################################################## -class SimpleProcessPlayer(Player): # pylint: disable=abstract-method +class SimpleProcessPlayer(Player): "A player that invokes a new process for each tag to play." 
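The _packagedCmd() change above prefers mpv/lame binaries shipped in the anki-audio wheel on Windows and macOS, falling back to the old sys.prefix lookup when the package is missing. The lookup reduces to:

```python
# Sketch of the anki-audio lookup; returns None when the wheel is absent
# (e.g. on Linux) so callers can fall back to sys.prefix or PATH.
from pathlib import Path

from anki.utils import is_win


def bundled_audio_binary(name: str) -> str | None:
    try:
        import anki_audio
    except ImportError:
        return None
    exe = Path(anki_audio.__file__).parent / (name + (".exe" if is_win else ""))
    return str(exe) if exe.exists() else None
```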
args: list[str] = [] @@ -759,19 +772,14 @@ class RecordDialog(QDialog): saveGeom(self, "audioRecorder2") def _start_recording(self) -> None: - if qtmajor > 5: - if macos_helper and platform.machine() == "arm64": - self._recorder = NativeMacRecorder( - namedtmp("rec.wav"), - ) - else: - self._recorder = QtAudioInputRecorder( - namedtmp("rec.wav"), self.mw, self._parent - ) + if macos_helper and platform.machine() == "arm64": + self._recorder = NativeMacRecorder( + namedtmp("rec.wav"), + ) else: - from aqt.qt.qt5_audio import QtAudioInputRecorder as Qt5Recorder - - self._recorder = Qt5Recorder(namedtmp("rec.wav"), self.mw, self._parent) + self._recorder = QtAudioInputRecorder( + namedtmp("rec.wav"), self.mw, self._parent + ) self._recorder.start(self._start_timer) def _start_timer(self) -> None: diff --git a/qt/aqt/stylesheets.py b/qt/aqt/stylesheets.py index 35e47ef0d..a262e18b9 100644 --- a/qt/aqt/stylesheets.py +++ b/qt/aqt/stylesheets.py @@ -120,7 +120,7 @@ class CustomStyles: QLabel:disabled {{ color: {tm.var(colors.FG_DISABLED)}; }} - QToolTip {{ color: {tm.var(colors.FG)}; background-color: {tm.var(colors.CANVAS)}; }} + QToolTip {{ color: {tm.var(colors.FG)}; background-color: {tm.var(colors.CANVAS)}; }} """ def menu(self, tm: ThemeManager) -> str: @@ -208,7 +208,7 @@ class CustomStyles: button_pressed_gradient( tm.var(colors.BUTTON_GRADIENT_START), tm.var(colors.BUTTON_GRADIENT_END), - tm.var(colors.SHADOW) + tm.var(colors.SHADOW), ) }; }} @@ -340,7 +340,7 @@ class CustomStyles: }} QTabBar::tab:selected:hover {{ background: { - button_gradient( + button_gradient( tm.var(colors.BUTTON_PRIMARY_GRADIENT_START), tm.var(colors.BUTTON_PRIMARY_GRADIENT_END), ) @@ -391,7 +391,7 @@ class CustomStyles: button_pressed_gradient( tm.var(colors.BUTTON_GRADIENT_START), tm.var(colors.BUTTON_GRADIENT_END), - tm.var(colors.SHADOW) + tm.var(colors.SHADOW), ) } }} @@ -404,18 +404,18 @@ class CustomStyles: }; }} QHeaderView::section:first {{ - border-left: 1px solid {tm.var(colors.BORDER_SUBTLE)}; + border-left: 1px solid {tm.var(colors.BORDER_SUBTLE)}; border-top-left-radius: {tm.var(props.BORDER_RADIUS)}; }} QHeaderView::section:!first {{ border-left: none; }} QHeaderView::section:last {{ - border-right: 1px solid {tm.var(colors.BORDER_SUBTLE)}; + border-right: 1px solid {tm.var(colors.BORDER_SUBTLE)}; border-top-right-radius: {tm.var(props.BORDER_RADIUS)}; }} QHeaderView::section:only-one {{ - border-left: 1px solid {tm.var(colors.BORDER_SUBTLE)}; + border-left: 1px solid {tm.var(colors.BORDER_SUBTLE)}; border-right: 1px solid {tm.var(colors.BORDER_SUBTLE)}; border-top-left-radius: {tm.var(props.BORDER_RADIUS)}; border-top-right-radius: {tm.var(props.BORDER_RADIUS)}; @@ -579,19 +579,19 @@ class CustomStyles: }} QScrollBar::handle:pressed {{ background-color: {tm.var(colors.SCROLLBAR_BG_ACTIVE)}; - }} + }} QScrollBar:horizontal {{ height: 12px; }} QScrollBar::handle:horizontal {{ min-width: 60px; - }} + }} QScrollBar:vertical {{ width: 12px; }} QScrollBar::handle:vertical {{ min-height: 60px; - }} + }} QScrollBar::add-line {{ border: none; background: none; @@ -647,10 +647,12 @@ class CustomStyles: margin: -7px 0; }} QSlider::handle:hover {{ - background: {button_gradient( - tm.var(colors.BUTTON_GRADIENT_START), - tm.var(colors.BUTTON_GRADIENT_END), - )} + background: { + button_gradient( + tm.var(colors.BUTTON_GRADIENT_START), + tm.var(colors.BUTTON_GRADIENT_END), + ) + } }} """ diff --git a/qt/aqt/sync.py b/qt/aqt/sync.py index 5a4d5fd4c..bedc05f8e 100644 --- a/qt/aqt/sync.py +++ b/qt/aqt/sync.py @@ 
-44,7 +44,7 @@ def get_sync_status( ) -> None: auth = mw.pm.sync_auth() if not auth: - callback(SyncStatus(required=SyncStatus.NO_CHANGES)) # pylint:disable=no-member + callback(SyncStatus(required=SyncStatus.NO_CHANGES)) return def on_future_done(fut: Future[SyncStatus]) -> None: @@ -302,7 +302,6 @@ def sync_login( username: str = "", password: str = "", ) -> None: - def on_future_done(fut: Future[SyncAuth], username: str, password: str) -> None: try: auth = fut.result() @@ -374,7 +373,9 @@ def get_id_and_pass_from_user( g.addWidget(passwd, 1, 1) l2.setBuddy(passwd) vbox.addLayout(g) - bb = QDialogButtonBox(QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel) # type: ignore + bb = QDialogButtonBox( + QDialogButtonBox.StandardButton.Ok | QDialogButtonBox.StandardButton.Cancel + ) # type: ignore ok_button = bb.button(QDialogButtonBox.StandardButton.Ok) assert ok_button is not None ok_button.setAutoDefault(True) diff --git a/qt/aqt/taskman.py b/qt/aqt/taskman.py index 143c1022a..2ca1c6670 100644 --- a/qt/aqt/taskman.py +++ b/qt/aqt/taskman.py @@ -84,15 +84,8 @@ class TaskManager(QObject): fut = executor.submit(task, **args) if on_done is not None: - - def wrapped_done(future: Future) -> None: - if uses_collection and not (self.mw.col and self.mw.col.db): - print(f"Ignored on_done as collection unloaded: {repr(on_done)}") - return - on_done(future) - fut.add_done_callback( - lambda future: self.run_on_main(lambda: wrapped_done(future)) + lambda future: self.run_on_main(lambda: on_done(future)) ) return fut diff --git a/qt/aqt/theme.py b/qt/aqt/theme.py index e06cf71c2..675eb9345 100644 --- a/qt/aqt/theme.py +++ b/qt/aqt/theme.py @@ -187,7 +187,7 @@ class ThemeManager: self, card_ord: int, night_mode: bool | None = None ) -> str: "Returns body classes used when showing a card." - return f"card card{card_ord+1} {self.body_class(night_mode, reviewer=True)}" + return f"card card{card_ord + 1} {self.body_class(night_mode, reviewer=True)}" def var(self, vars: dict[str, str]) -> str: """Given day/night colors/props, return the correct one for the current theme.""" @@ -213,13 +213,12 @@ class ThemeManager: return False elif theme == Theme.DARK: return True + elif is_win: + return get_windows_dark_mode() + elif is_mac: + return get_macos_dark_mode() else: - if is_win: - return get_windows_dark_mode() - elif is_mac: - return get_macos_dark_mode() - else: - return get_linux_dark_mode() + return get_linux_dark_mode() def apply_style(self) -> None: "Apply currently configured style." 
@@ -340,7 +339,7 @@ def get_windows_dark_mode() -> bool: if not is_win: return False - from winreg import ( # type: ignore[attr-defined] # pylint: disable=import-error + from winreg import ( # type: ignore[attr-defined] HKEY_CURRENT_USER, OpenKey, QueryValueEx, @@ -352,7 +351,7 @@ def get_windows_dark_mode() -> bool: r"Software\Microsoft\Windows\CurrentVersion\Themes\Personalize", ) return not QueryValueEx(key, "AppsUseLightTheme")[0] - except Exception as err: + except Exception: # key reportedly missing or set to wrong type on some systems return False @@ -416,12 +415,12 @@ def get_linux_dark_mode() -> bool: capture_output=True, encoding="utf8", ) - except FileNotFoundError as e: + except FileNotFoundError: # detection strategy failed, missing program # print(e) continue - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError: # detection strategy failed, command returned error # print(e) continue diff --git a/qt/aqt/toolbar.py b/qt/aqt/toolbar.py index 44f2ee66c..be547b5ba 100644 --- a/qt/aqt/toolbar.py +++ b/qt/aqt/toolbar.py @@ -87,6 +87,7 @@ class TopWebView(ToolbarWebView): else: self.flatten() + self.adjustHeightToFit() self.show() def _onHeight(self, qvar: int | None) -> None: diff --git a/qt/aqt/tts.py b/qt/aqt/tts.py index 079a5e3de..d559fb41f 100644 --- a/qt/aqt/tts.py +++ b/qt/aqt/tts.py @@ -166,7 +166,6 @@ class MacVoice(TTSVoice): original_name: str -# pylint: disable=no-member class MacTTSPlayer(TTSProcessPlayer): "Invokes a process to play the audio in the background." @@ -487,7 +486,7 @@ if is_win: class WindowsTTSPlayer(TTSProcessPlayer): default_rank = -1 try: - import win32com.client # pylint: disable=import-error + import win32com.client speaker = win32com.client.Dispatch("SAPI.SpVoice") except Exception as exc: diff --git a/qt/aqt/update.py b/qt/aqt/update.py index fd0e4eafd..e5794eead 100644 --- a/qt/aqt/update.py +++ b/qt/aqt/update.py @@ -1,11 +1,19 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +from __future__ import annotations + import aqt from anki.buildinfo import buildhash from anki.collection import CheckForUpdateResponse, Collection from anki.utils import dev_mode, int_time, int_version, plat_desc from aqt.operations import QueryOp +from aqt.package import ( + launcher_executable as _launcher_executable, +) +from aqt.package import ( + update_and_restart as _update_and_restart, +) from aqt.qt import * from aqt.utils import openLink, show_warning, showText, tr @@ -77,4 +85,7 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None: # ignore this update mw.pm.meta["suppressUpdate"] = ver elif ret == QMessageBox.StandardButton.Yes: - openLink(aqt.appWebsiteDownloadSection) + if _launcher_executable(): + _update_and_restart() + else: + openLink(aqt.appWebsiteDownloadSection) diff --git a/qt/aqt/utils.py b/qt/aqt/utils.py index 6ae8bace8..64d057082 100644 --- a/qt/aqt/utils.py +++ b/qt/aqt/utils.py @@ -19,7 +19,7 @@ from send2trash import send2trash import aqt from anki._legacy import DeprecatedNamesMixinForModule from anki.collection import Collection, HelpPage -from anki.lang import TR, tr_legacyglobal # pylint: disable=unused-import +from anki.lang import TR, tr_legacyglobal # noqa: F401 from anki.utils import ( call, invalid_filename, @@ -31,7 +31,7 @@ from anki.utils import ( from aqt.qt import * from aqt.qt import ( PYQT_VERSION_STR, - QT_VERSION_STR, + QT_VERSION_STR, # noqa: F401 QAction, QApplication, QCheckBox, @@ -87,24 +87,15 @@ if 
TYPE_CHECKING: def aqt_data_path() -> Path: - # packaged? - if getattr(sys, "frozen", False): - prefix = Path(sys.prefix) - path = prefix / "lib/_aqt/data" - if path.exists(): - return path - else: - return prefix / "../Resources/_aqt/data" - else: - import _aqt.colors + import _aqt.colors - data_folder = Path(inspect.getfile(_aqt.colors)).with_name("data") - if data_folder.exists(): - return data_folder.absolute() - else: - # should only happen when running unit tests - print("warning, data folder not found") - return Path(".") + data_folder = Path(inspect.getfile(_aqt.colors)).with_name("data") + if data_folder.exists(): + return data_folder.absolute() + else: + # should only happen when running unit tests + print("warning, data folder not found") + return Path(".") def aqt_data_folder() -> str: @@ -303,7 +294,7 @@ def showInfo( icon = QMessageBox.Icon.Critical else: icon = QMessageBox.Icon.Information - mb = QMessageBox(parent_widget) # + mb = QMessageBox(parent_widget) if textFormat == "plain": mb.setTextFormat(Qt.TextFormat.PlainText) elif textFormat == "rich": @@ -945,14 +936,39 @@ def show_in_folder(path: str) -> None: """ call(osascript_to_args(script)) else: - # Just open the file in any other platform - with no_bundled_libs(): - QDesktopServices.openUrl(QUrl.fromLocalFile(path)) + # For linux, there are multiple file managers. Let's test if one of the + # most common file managers is found and use it in case it is installed. + # If none of this list are installed, use a fallback. The fallback + # might open the image in a web browser, image viewer or others, + # depending on the users defaults. + file_managers = [ + "nautilus", # GNOME + "dolphin", # KDE + "pcmanfm", # LXDE + "thunar", # XFCE + "nemo", # Cinnamon + "caja", # MATE + ] + + available_file_manager = None + + # Test if a file manager is installed and use it, fallback otherwise + for file_manager in file_managers: + if shutil.which(file_manager): + available_file_manager = file_manager + break + + if available_file_manager: + subprocess.run([available_file_manager, path], check=False) + else: + # Just open the file in any other platform + with no_bundled_libs(): + QDesktopServices.openUrl(QUrl.fromLocalFile(path)) def _show_in_folder_win32(path: str) -> None: - import win32con # pylint: disable=import-error - import win32gui # pylint: disable=import-error + import win32con + import win32gui from aqt import mw @@ -1207,12 +1223,11 @@ def supportText() -> str: platname = platform.platform() return """\ -Anki {} {} {} +Anki {} {} Python {} Qt {} PyQt {} Platform: {} """.format( version_with_build(), - "(src)" if not getattr(sys, "frozen", False) else "", "(ao)" if mw.addonManager.dirty else "", platform.python_version(), qVersion(), @@ -1248,12 +1263,12 @@ def opengl_vendor() -> str | None: # Can't use versionFunctions there return None - vp = QOpenGLVersionProfile() # type: ignore # pylint: disable=undefined-variable + vp = QOpenGLVersionProfile() # type: ignore vp.setVersion(2, 0) try: vf = ctx.versionFunctions(vp) # type: ignore - except ImportError as e: + except ImportError: return None if vf is None: diff --git a/qt/aqt/webview.py b/qt/aqt/webview.py index 966d3de5a..95d84c00e 100644 --- a/qt/aqt/webview.py +++ b/qt/aqt/webview.py @@ -980,7 +980,6 @@ def _create_ankiwebview_subclass( /, **fixed_kwargs: Unpack[_AnkiWebViewKwargs], ) -> Type[AnkiWebView]: - def __init__(self, *args: Any, **kwargs: _AnkiWebViewKwargs) -> None: # user‑supplied kwargs override fixed kwargs merged = cast(_AnkiWebViewKwargs, 
{**fixed_kwargs, **kwargs}) diff --git a/qt/aqt/winpaths.py b/qt/aqt/winpaths.py index e53a47c06..8b2698739 100644 --- a/qt/aqt/winpaths.py +++ b/qt/aqt/winpaths.py @@ -100,7 +100,7 @@ _SHGetFolderPath.restype = _err_unless_zero def _get_path_buf(csidl): path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH) - result = _SHGetFolderPath(0, csidl, 0, 0, path_buf) + _SHGetFolderPath(0, csidl, 0, 0, path_buf) return path_buf.value diff --git a/qt/bundle/Cargo.lock b/qt/bundle/Cargo.lock deleted file mode 100644 index 544276d6f..000000000 --- a/qt/bundle/Cargo.lock +++ /dev/null @@ -1,629 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] -name = "anki" -version = "0.0.0" -dependencies = [ - "embed-resource", - "jemallocator", - "libc", - "libc-stdhandle", - "mimalloc", - "pyembed", - "snmalloc-rs", - "winapi", -] - -[[package]] -name = "anyhow" -version = "1.0.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1" - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "base64" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" -dependencies = [ - "byteorder", -] - -[[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "cc" -version = "1.0.71" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "charset" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f426e64df1c3de26cbf44593c6ffff5dbfd43bbf9de0d075058558126b3fc73" -dependencies = [ - "base64 0.10.1", - "encoding_rs", -] - -[[package]] -name = "cmake" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7b858541263efe664aead4a5209a4ae5c5d2811167d4ed4ee0944503f8d2089" -dependencies = [ - "cc", -] - -[[package]] -name = "cty" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" - -[[package]] -name = "dunce" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"453440c271cf5577fd2a40e4942540cb7d0d2f85e27c8d07dd0023c925a67541" - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "embed-resource" -version = "1.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85505eb239fc952b300f29f0556d2d884082a83566768d980278d8faf38c780d" -dependencies = [ - "cc", - "vswhom", - "winreg", -] - -[[package]] -name = "encoding_rs" -version = "0.8.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a74ea89a0a1b98f6332de42c95baff457ada66d1cb4030f9ff151b2041a1c746" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "fs_extra" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" - -[[package]] -name = "indoc" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3" - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "itertools" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" -dependencies = [ - "either", -] - -[[package]] -name = "jemalloc-sys" -version = "0.5.2+5.3.0-patched" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134163979b6eed9564c98637b710b40979939ba351f59952708234ea11b5f3f8" -dependencies = [ - "cc", - "fs_extra", - "libc", -] - -[[package]] -name = "jemallocator" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c2514137880c52b0b4822b563fadd38257c1f380858addb74a400889696ea6" -dependencies = [ - "jemalloc-sys", - "libc", -] - -[[package]] -name = "libc" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013" - -[[package]] -name = "libc-stdhandle" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dac2473dc28934c5e0b82250dab231c9d3b94160d91fe9ff483323b05797551" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "libmimalloc-sys" -version = "0.1.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" -dependencies = [ - "cc", - "cty", - "libc", -] - -[[package]] -name = "lock_api" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" -dependencies = [ - "scopeguard", -] - -[[package]] -name = "mailparse" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee6e1ca1c8396da58f8128176f6980dd57bec84c8670a479519d3655f2d6734" -dependencies = [ - "base64 0.13.0", - "charset", - "quoted_printable", -] - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = 
"memmap2" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memory-module-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bbdce2925c681860b08875119254fb5543dbf6337c56ff93afebeed9c686da3" -dependencies = [ - "cc", - "libc", - "winapi", -] - -[[package]] -name = "mimalloc" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" -dependencies = [ - "libmimalloc-sys", -] - -[[package]] -name = "once_cell" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" - -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", -] - -[[package]] -name = "proc-macro2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "pyembed" -version = "0.24.0-pre" -dependencies = [ - "anyhow", - "dunce", - "jemalloc-sys", - "libc", - "libmimalloc-sys", - "once_cell", - "pyo3", - "pyo3-build-config", - "python-oxidized-importer", - "python-packaging", - "snmalloc-sys", -] - -[[package]] -name = "pyo3" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543" -dependencies = [ - "cfg-if", - "indoc", - "libc", - "memoffset", - "parking_lot", - "pyo3-build-config", - "pyo3-ffi", - "pyo3-macros", - "unindent", -] - -[[package]] -name = "pyo3-build-config" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8" -dependencies = [ - "once_cell", - "target-lexicon", -] - -[[package]] -name = "pyo3-ffi" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc" -dependencies = [ - "libc", - "pyo3-build-config", -] - -[[package]] -name = "pyo3-macros" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28" -dependencies = [ - "proc-macro2", - "pyo3-macros-backend", - "quote", - "syn", -] - -[[package]] -name = "pyo3-macros-backend" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "python-oxidized-importer" -version = "0.9.0-pre" -dependencies = [ - "anyhow", - "memmap2", - "memory-module-sys", - "once_cell", - "pyo3", - "python-packaging", - "python-packed-resources", - "simple-file-manifest", - "winapi", -] - -[[package]] -name = "python-packaging" -version = "0.16.0-pre" -dependencies = [ - "anyhow", - "byteorder", - "encoding_rs", - "itertools", - "mailparse", - "once_cell", - "python-packed-resources", - "regex", - "simple-file-manifest", - "spdx", - "walkdir", -] - -[[package]] -name = "python-packed-resources" -version = "0.12.0-pre" -dependencies = [ - "anyhow", - "byteorder", -] - -[[package]] -name = "quote" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "quoted_printable" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1238256b09923649ec89b08104c4dfe9f6cb2fea734a5db5384e44916d59e9c5" - -[[package]] -name = "redox_syscall" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" -dependencies = [ - "bitflags", -] - -[[package]] -name = "regex" -version = "1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "simple-file-manifest" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd19be0257552dd56d1bb6946f89f193c6e5b9f13cc9327c4bc84a357507c74" - -[[package]] -name = "smallvec" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" - -[[package]] -name = "snmalloc-rs" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36acaace2719c972eab3ef6a6b3aee4495f0bf300f59715bb9cff6c5acf4ae20" -dependencies = [ - "snmalloc-sys", -] - -[[package]] -name = "snmalloc-sys" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35a7e6e7d5fe756bee058ddedefc7e0a9f9c8dbaa9401b48ed3c17d6578e40b5" -dependencies = [ - "cc", - "cmake", -] - -[[package]] -name = "spdx" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a346909b3fd07776f9b96b98d4a58e3666f831c9a672c279b10f795a34c36425" -dependencies = [ - "smallvec", -] - -[[package]] -name = "syn" -version = "1.0.80" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "target-lexicon" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9410d0f6853b1d94f0e519fb95df60f29d2c1eff2d921ffdf01a4c8a3b54f12d" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "unindent" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" - -[[package]] -name = "vswhom" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" -dependencies = [ - "libc", - "vswhom-sys", -] - -[[package]] -name = "vswhom-sys" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f5402d3d0e79a069714f7b48e3ecc60be7775a2c049cb839457457a239532" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "walkdir" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" -dependencies = [ - "same-file", - "winapi", - "winapi-util", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "winreg" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" -dependencies = [ - "winapi", -] diff --git a/qt/bundle/Cargo.toml b/qt/bundle/Cargo.toml deleted file mode 100644 index 920e890d0..000000000 --- a/qt/bundle/Cargo.toml +++ /dev/null @@ -1,60 +0,0 @@ -[package] -name = "anki" -version = "0.0.0" -authors = ["Ankitects Pty Ltd and contributors "] -build = "build.rs" -edition = "2021" -license = "AGPL-3.0-or-later" -publish = false -rust-version = "1.64" - -[dependencies] -pyembed = { path = "./PyOxidizer/pyembed", default-features = false } - -[target.'cfg(windows)'.dependencies] -winapi = { version = "0.3", features = ["wincon"] } -libc = "0.2" -libc-stdhandle = "=0.1.0" - -[dependencies.jemallocator] -version = "0.5" -optional = true - -[dependencies.mimalloc] -version = "0.1" -optional = true -features = ["local_dynamic_tls", "override", "secure"] - -[dependencies.snmalloc-rs] -version = "0.2" 
-optional = true - -[build-dependencies] -embed-resource = "1.6" - -[features] -default = ["build-mode-standalone"] - -global-allocator-jemalloc = ["jemallocator"] -global-allocator-mimalloc = ["mimalloc"] -global-allocator-snmalloc = ["snmalloc-rs"] - -allocator-jemalloc = ["pyembed/allocator-jemalloc"] -allocator-mimalloc = ["pyembed/allocator-mimalloc"] -allocator-snmalloc = ["pyembed/allocator-snmalloc"] - -# Build this crate in isolation, without using PyOxidizer. -build-mode-standalone = [] - -# Build this crate by executing a `pyoxidizer` executable to build -# required artifacts. -build-mode-pyoxidizer-exe = [] - -# Build this crate by reusing artifacts generated by `pyoxidizer` out-of-band. -# In this mode, the PYOXIDIZER_ARTIFACT_DIR environment variable can refer -# to the directory containing build artifacts produced by `pyoxidizer`. If not -# set, OUT_DIR will be used. -build-mode-prebuilt-artifacts = [] - -[profile.release] -lto = true diff --git a/qt/bundle/PyOxidizer b/qt/bundle/PyOxidizer deleted file mode 160000 index 12a249f68..000000000 --- a/qt/bundle/PyOxidizer +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 12a249f686484c5e212ba800e1e7f18c7c4b1b27 diff --git a/qt/bundle/build.rs b/qt/bundle/build.rs deleted file mode 100644 index 4b0318dbf..000000000 --- a/qt/bundle/build.rs +++ /dev/null @@ -1,95 +0,0 @@ -// Based off PyOxidizer's 'init-rust-project'. -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v. 2.0. If a copy of the MPL was not distributed with this -// file, You can obtain one at https://mozilla.org/MPL/2.0/. - -use std::path::{Path, PathBuf}; - -use embed_resource; - -const DEFAULT_PYTHON_CONFIG_FILENAME: &str = "default_python_config.rs"; -const DEFAULT_PYTHON_CONFIG: &str = "\ -pub fn default_python_config<'a>() -> pyembed::OxidizedPythonInterpreterConfig<'a> { - pyembed::OxidizedPythonInterpreterConfig::default() -} -"; - -/// Build by calling a `pyoxidizer` executable to generate build artifacts. 
-fn build_with_pyoxidizer_exe(exe: Option, resolve_target: Option<&str>) { - let pyoxidizer_exe = if let Some(path) = exe { - path - } else { - "pyoxidizer".to_string() - }; - - let mut args = vec!["run-build-script", "build.rs"]; - if let Some(target) = resolve_target { - args.push("--target"); - args.push(target); - } - - match std::process::Command::new(pyoxidizer_exe) - .args(args) - .status() - { - Ok(status) => { - if !status.success() { - panic!("`pyoxidizer run-build-script` failed"); - } - } - Err(e) => panic!("`pyoxidizer run-build-script` failed: {}", e.to_string()), - } -} - -#[allow(clippy::if_same_then_else)] -fn main() { - if std::env::var("CARGO_FEATURE_BUILD_MODE_STANDALONE").is_ok() { - let path = PathBuf::from(std::env::var("OUT_DIR").expect("OUT_DIR not defined")); - let path = path.join(DEFAULT_PYTHON_CONFIG_FILENAME); - - std::fs::write(&path, DEFAULT_PYTHON_CONFIG.as_bytes()) - .expect("failed to write default python config"); - println!( - "cargo:rustc-env=DEFAULT_PYTHON_CONFIG_RS={}", - path.display() - ); - } else if std::env::var("CARGO_FEATURE_BUILD_MODE_PYOXIDIZER_EXE").is_ok() { - let target = if let Ok(target) = std::env::var("PYOXIDIZER_BUILD_TARGET") { - Some(target) - } else { - None - }; - - build_with_pyoxidizer_exe( - std::env::var("PYOXIDIZER_EXE").ok(), - target.as_ref().map(|target| target.as_ref()), - ); - } else if std::env::var("CARGO_FEATURE_BUILD_MODE_PREBUILT_ARTIFACTS").is_ok() { - // relative to src/ - let artifacts = Path::new("../../../out/bundle/artifacts/"); - let config_rs = artifacts.join("default_python_config.rs"); - println!( - "cargo:rustc-env=DEFAULT_PYTHON_CONFIG_RS={}", - config_rs.display() - ); - let config_txt = artifacts.join("pyo3-build-config-file.txt"); - println!("cargo:rustc-env=PYO3_CONFIG_FILE={}", config_txt.display()); - - let link_arg = if cfg!(target_os = "macos") { - "-rdynamic" - } else { - "-Wl,-export-dynamic" - }; - println!("cargo:rustc-link-arg={link_arg}"); - } else { - panic!("build-mode-* feature not set"); - } - - let target_family = - std::env::var("CARGO_CFG_TARGET_FAMILY").expect("CARGO_CFG_TARGET_FAMILY not defined"); - - // embed manifest and icon - if target_family == "windows" { - embed_resource::compile("win/anki-manifest.rc"); - } -} diff --git a/qt/bundle/mac/Cargo.toml b/qt/bundle/mac/Cargo.toml deleted file mode 100644 index a154b76f7..000000000 --- a/qt/bundle/mac/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "makeapp" -version.workspace = true -authors.workspace = true -edition.workspace = true -license.workspace = true -publish = false -rust-version.workspace = true - -[dependencies] -anyhow.workspace = true -apple-bundles.workspace = true -camino.workspace = true -clap.workspace = true -glob.workspace = true -plist.workspace = true -serde.workspace = true -serde_json.workspace = true -simple-file-manifest.workspace = true -walkdir.workspace = true diff --git a/qt/bundle/mac/src/codesign.rs b/qt/bundle/mac/src/codesign.rs deleted file mode 100644 index fb251521f..000000000 --- a/qt/bundle/mac/src/codesign.rs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; -use std::process::Command; - -use anyhow::bail; -use anyhow::Result; -use camino::Utf8Path; -use camino::Utf8PathBuf; - -const CODESIGN_ARGS: &[&str] = &["-vvvv", "-o", "runtime", "-s", "Developer ID Application:"]; - -pub fn codesign_python_libs(bundle_dir: &Utf8PathBuf) -> Result<()> { - for 
entry in glob::glob(bundle_dir.join("Contents/MacOS/lib/**/*.so").as_str())? { - let entry = entry?; - let entry = Utf8PathBuf::from_path_buf(entry).unwrap(); - codesign_file(&entry, &[])?; - } - codesign_file(&bundle_dir.join("Contents/MacOS/libankihelper.dylib"), &[]) -} - -pub fn codesign_app(bundle_dir: &Utf8PathBuf) -> Result<()> { - codesign_file( - bundle_dir, - &["--entitlements", "qt/bundle/mac/entitlements.python.xml"], - ) -} - -fn codesign_file(path: &Utf8Path, extra_args: &[&str]) -> Result<()> { - if env::var("ANKI_CODESIGN").is_ok() { - let status = Command::new("codesign") - .args(CODESIGN_ARGS) - .args(extra_args) - .arg(path.as_str()) - .status()?; - if !status.success() { - bail!("codesign failed"); - } - } - - Ok(()) -} diff --git a/qt/bundle/mac/src/dmg.rs b/qt/bundle/mac/src/dmg.rs deleted file mode 100644 index 862150340..000000000 --- a/qt/bundle/mac/src/dmg.rs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::fs; -use std::process::Command; - -use anyhow::Context; -use anyhow::Result; -use camino::Utf8Path; -use camino::Utf8PathBuf; -use clap::Args; - -use crate::notarize::wait_then_staple_app; - -#[derive(Args)] -pub struct BuildDmgsArgs { - qt6_dmg: Utf8PathBuf, - qt5_dmg: Option, -} - -pub fn make_dmgs(args: BuildDmgsArgs) -> Result<()> { - let root = Utf8Path::new("out/bundle/app"); - let mut variants = vec![("std", args.qt6_dmg)]; - if let Some(alt) = args.qt5_dmg { - variants.push(("alt", alt)); - } - - for (variant, dmg) in variants { - let app = root.join(variant).join("Anki.app"); - if std::env::var("ANKI_CODESIGN").is_ok() { - let uuid = fs::read_to_string(app.with_extension("uuid")).context("read uuid")?; - wait_then_staple_app(&app, uuid)?; - } - - make_dmg(&app, &dmg)?; - } - - Ok(()) -} - -fn make_dmg(app_folder: &Utf8Path, dmg: &Utf8Path) -> Result<()> { - assert!( - Command::new("qt/bundle/mac/dmg/build.sh") - .args([app_folder.parent().unwrap().as_str(), dmg.as_str()]) - .status() - .context("dmg")? - .success(), - "dmg" - ); - Ok(()) -} diff --git a/qt/bundle/mac/src/main.rs b/qt/bundle/mac/src/main.rs deleted file mode 100644 index 2182d808e..000000000 --- a/qt/bundle/mac/src/main.rs +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -#![cfg(unix)] - -//! Munge the output of PyOxidizer into a macOS app bundle, and combine it -//! with our other runtime dependencies. 
- -mod codesign; -mod dmg; -mod notarize; - -use std::env; -use std::fs; -use std::os::unix::prelude::PermissionsExt; -use std::process::Command; - -use anyhow::bail; -use anyhow::Result; -use apple_bundles::MacOsApplicationBundleBuilder; -use camino::Utf8Path; -use camino::Utf8PathBuf; -use clap::Parser; -use clap::Subcommand; -use clap::ValueEnum; -use codesign::codesign_app; -use codesign::codesign_python_libs; -use dmg::make_dmgs; -use dmg::BuildDmgsArgs; -use notarize::notarize_app; -use plist::Value; -use simple_file_manifest::FileEntry; -use walkdir::WalkDir; - -#[derive(Clone, ValueEnum)] -enum DistKind { - Standard, - Alternate, -} - -impl DistKind { - fn folder_name(&self) -> &'static str { - match self { - DistKind::Standard => "std", - DistKind::Alternate => "alt", - } - } - - fn input_folder(&self) -> Utf8PathBuf { - Utf8Path::new("out/bundle").join(self.folder_name()) - } - - fn output_folder(&self) -> Utf8PathBuf { - Utf8Path::new("out/bundle/app") - .join(self.folder_name()) - .join("Anki.app") - } - - fn macos_min(&self) -> &str { - match self { - DistKind::Standard => { - if env::var("MAC_X86").is_ok() { - "11" - } else { - "12" - } - } - DistKind::Alternate => "10.13.4", - } - } - - fn qt_repo(&self) -> &Utf8Path { - Utf8Path::new(match self { - DistKind::Standard => { - if cfg!(target_arch = "aarch64") && env::var("MAC_X86").is_err() { - "out/extracted/mac_arm_qt6" - } else { - "out/extracted/mac_amd_qt6" - } - } - DistKind::Alternate => "out/extracted/mac_amd_qt5", - }) - } -} - -#[derive(Parser)] -struct Cli { - #[command(subcommand)] - command: Commands, -} - -#[derive(Subcommand)] -enum Commands { - BuildApp { - version: String, - kind: DistKind, - stamp: Utf8PathBuf, - }, - BuildDmgs(BuildDmgsArgs), -} - -fn main() -> Result<()> { - match Cli::parse().command { - Commands::BuildApp { - version, - kind, - stamp, - } => { - let plist = get_plist(&version); - make_app(kind, plist, &stamp) - } - Commands::BuildDmgs(args) => make_dmgs(args), - } -} - -fn make_app(kind: DistKind, mut plist: plist::Dictionary, stamp: &Utf8Path) -> Result<()> { - let input_folder = kind.input_folder(); - let output_folder = kind.output_folder(); - let output_variant = output_folder.parent().unwrap(); - if output_variant.exists() { - fs::remove_dir_all(output_variant)?; - } - fs::create_dir_all(&output_folder)?; - - let mut builder = MacOsApplicationBundleBuilder::new("Anki")?; - plist.insert( - "LSMinimumSystemVersion".into(), - Value::from(kind.macos_min()), - ); - builder.set_info_plist_from_dictionary(plist)?; - builder.add_file_resources("Assets.car", &include_bytes!("../icon/Assets.car")[..])?; - - for entry in WalkDir::new(&input_folder) - .into_iter() - .map(Result::unwrap) - .filter(|e| !e.file_type().is_dir()) - { - let path = entry.path(); - let entry = FileEntry::try_from(path)?; - let relative_path = path.strip_prefix(&input_folder)?; - let path_str = relative_path.to_str().unwrap(); - if path_str.contains("libankihelper") { - builder.add_file_macos("libankihelper.dylib", entry)?; - } else if path_str.contains("aqt/data") - || path_str.contains("certifi") - || path_str.contains("google/protobuf") - { - builder.add_file_resources(relative_path.strip_prefix("lib").unwrap(), entry)?; - } else { - if path_str.contains("__pycache__") { - continue; - } - builder.add_file_macos(relative_path, entry)?; - } - } - - builder.files().materialize_files(&output_folder)?; - fix_rpath(output_folder.join("Contents/MacOS/anki"))?; - codesign_python_libs(&output_folder)?; - 
copy_in_audio(&output_folder)?; - copy_in_qt(&output_folder, kind)?; - codesign_app(&output_folder)?; - fixup_perms(&output_folder)?; - notarize_app(&output_folder)?; - fs::write(stamp, b"")?; - - Ok(()) -} - -/// The bundle builder writes some files without world read/execute perms, -/// which prevents them from being opened by a non-admin user. -fn fixup_perms(dir: &Utf8Path) -> Result<()> { - let status = Command::new("find") - .arg(dir) - .args(["-not", "-perm", "-a=r", "-exec", "chmod", "a+r", "{}", ";"]) - .status()?; - if !status.success() { - bail!("error setting perms"); - } - fs::set_permissions( - dir.join("Contents/MacOS/anki"), - PermissionsExt::from_mode(0o755), - )?; - Ok(()) -} - -/// Copy everything at the provided path into the Contents/ folder of our app. -fn extend_app_contents(source: &Utf8Path, target_dir: &Utf8Path) -> Result<()> { - let status = Command::new("rsync") - .arg("-a") - .arg(format!("{}/", source.as_str())) - .arg(target_dir) - .status()?; - if !status.success() { - bail!("error syncing {source:?}"); - } - Ok(()) -} - -fn copy_in_audio(bundle_dir: &Utf8Path) -> Result<()> { - println!("Copying in audio..."); - - let src_folder = Utf8Path::new( - if cfg!(target_arch = "aarch64") && env::var("MAC_X86").is_err() { - "out/extracted/mac_arm_audio" - } else { - "out/extracted/mac_amd_audio" - }, - ); - extend_app_contents(src_folder, &bundle_dir.join("Contents/Resources")) -} - -fn copy_in_qt(bundle_dir: &Utf8Path, kind: DistKind) -> Result<()> { - println!("Copying in Qt..."); - extend_app_contents(kind.qt_repo(), &bundle_dir.join("Contents")) -} - -fn fix_rpath(exe_path: Utf8PathBuf) -> Result<()> { - let status = Command::new("install_name_tool") - .arg("-add_rpath") - .arg("@executable_path/../Frameworks") - .arg(exe_path.as_str()) - .status()?; - assert!(status.success()); - Ok(()) -} - -fn get_plist(anki_version: &str) -> plist::Dictionary { - let reader = std::io::Cursor::new(include_bytes!("Info.plist")); - let mut plist = Value::from_reader(reader) - .unwrap() - .into_dictionary() - .unwrap(); - plist.insert( - "CFBundleShortVersionString".into(), - Value::from(anki_version), - ); - plist -} diff --git a/qt/bundle/mac/src/notarize.rs b/qt/bundle/mac/src/notarize.rs deleted file mode 100644 index 0688354f9..000000000 --- a/qt/bundle/mac/src/notarize.rs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::env; -use std::fs; -use std::process::Command; - -use anyhow::bail; -use anyhow::Context; -use anyhow::Result; -use camino::Utf8Path; -use serde::Deserialize; - -#[derive(Deserialize)] -struct NotarySubmitOutput { - id: String, -} - -pub fn notarize_app(output_folder: &Utf8Path) -> Result<()> { - if env::var("ANKI_CODESIGN").is_err() { - return Ok(()); - } - if env::var("ANKI_NO_NOTARIZE").is_ok() { - return Ok(()); - } - let zip_file = output_folder.with_extension("zip"); - assert!( - Command::new("ditto") - .args([ - "-c", - "-k", - "--keepParent", - output_folder.as_str(), - zip_file.as_str(), - ]) - .status() - .unwrap() - .success(), - "zip build" - ); - let output = Command::new("xcrun") - .args([ - "notarytool", - "submit", - zip_file.as_str(), - "-f", - "json", - "-p", - "default", - ]) - .output() - .expect("notarytool"); - if !output.status.success() { - panic!( - "notarytool submit failed: {} {}", - String::from_utf8_lossy(&output.stderr), - String::from_utf8_lossy(&output.stdout) - ) - } - let output: NotarySubmitOutput 
= match serde_json::from_slice(&output.stdout) { - Ok(out) => out, - Err(err) => panic!( - "unable to parse notary output: {err} {} {}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ), - }; - let uuid_path = output_folder.with_extension("uuid"); - fs::write(uuid_path, output.id).expect("write uuid"); - Ok(()) -} - -#[derive(Deserialize)] -struct NotaryWaitOutput { - status: String, -} - -pub fn wait_then_staple_app(app: &Utf8Path, uuid: String) -> Result<()> { - let output = Command::new("xcrun") - .args(["notarytool", "wait", &uuid, "-p", "default", "-f", "json"]) - .output() - .context("notary wait")?; - let output: NotaryWaitOutput = serde_json::from_slice(&output.stdout) - .with_context(|| String::from_utf8_lossy(&output.stderr).to_string())?; - if output.status != "Accepted" { - bail!("unexpected status: {}", output.status); - } - - assert!( - Command::new("xcrun") - .args(["stapler", "staple", app.as_str()]) - .status() - .context("staple")? - .success(), - "staple" - ); - - // clean up temporary files - fs::remove_file(app.with_extension("zip")).context("app.zip")?; - fs::remove_file(app.with_extension("uuid")).context("app.uuid")?; - - Ok(()) -} diff --git a/qt/bundle/pyoxidizer.bzl b/qt/bundle/pyoxidizer.bzl deleted file mode 100644 index dc4870606..000000000 --- a/qt/bundle/pyoxidizer.bzl +++ /dev/null @@ -1,189 +0,0 @@ -# type: ignore - -set_build_path(VARS.get("build")) - -excluded_source_prefixes = [ - "ctypes.test", - "distutils.tests", - "idlelib", - "lib2to3.tests", - "test", - "tkinter", - "win32comext", - "win32com", - "win32", - "pythonwin", - "PyQt6", - "pip", - "setuptools", - "google" -] - -excluded_resource_suffixes = [ - ".pyi", - ".pyc", - "py.typed", -] - -included_resource_packages = [ - "anki", - "aqt", - "_aqt", - "lib2to3", - "certifi", - "jsonschema", -] - - -def handle_resource(policy, resource): - if type(resource) == "PythonModuleSource": - resource.add_include = True - for prefix in excluded_source_prefixes: - if resource.name.startswith(prefix) and not resource.name.startswith("pip_system_certs"): - resource.add_include = False - - # if resource.add_include: - # print("src", resource.name, resource.add_include) - - elif type(resource) == "PythonExtensionModule": - resource.add_include = True - if resource.name.startswith("win32") or resource.name.startswith("PyQt6"): - resource.add_include = False - - # print("ext", resource.name, resource.add_include) - - elif type(resource) == "PythonPackageResource": - for prefix in included_resource_packages: - if resource.package.startswith(prefix): - resource.add_include = True - if resource.package == "certifi": - resource.add_location = "filesystem-relative:lib" - for suffix in excluded_resource_suffixes: - if resource.name.endswith(suffix): - resource.add_include = False - - # aqt web resources can be stored in binary - if resource.package.endswith("aqt"): - if not resource.name.startswith("data/web"): - resource.add_location = "filesystem-relative:lib" - - # if resource.add_include: - # print("rsrc", resource.package, resource.name, resource.add_include) - - elif type(resource) == "PythonPackageDistributionResource": - # print("dist", resource.package, resource.name, resource.add_include) - pass - - # elif type(resource) == "File": - # print(resource.path) - - elif type(resource) == "File": - if ( - resource.path.startswith("win32") - or resource.path.startswith("pythonwin") - or resource.path.startswith("pywin32") - ): - exclude = ( - "tests" in resource.path - or 
"benchmark" in resource.path - or "__pycache__" in resource.path - ) - if not exclude: - # print("add", resource.path) - resource.add_include = True - resource.add_location = "filesystem-relative:lib" - - if ".dist-info" in resource.path: - resource.add_include = False - - else: - print("unexpected type", type(resource)) - - -def make_exe(): - if BUILD_TARGET_TRIPLE == "x86_64-unknown-linux-gnu": - dist = PythonDistribution( - url = "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-pgo-full.tar.zst", - sha256 = "7ccdc1b19599a6660040ec2f0ade755b32bb45c897ea75d0b7826236146b78cf", - ) - elif BUILD_TARGET_TRIPLE == "x86_64-apple-darwin": - dist = PythonDistribution( - url = "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-pgo-full.tar.zst", - sha256 = "b2f06f0f0ebbbed0eae87a6e8eede2e0d838735386a8b84257d4f02d16b9baec", - ) - elif BUILD_TARGET_TRIPLE == "aarch64-apple-darwin": - dist = PythonDistribution( - url = "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-pgo-full.tar.zst", - sha256 = "154dfa7cd6f9a6047a58811f84bef69b019ea459e5b42991c8af63e1285b445f", - ) - elif BUILD_TARGET_TRIPLE == "x86_64-pc-windows-msvc": - dist = PythonDistribution( - url = "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-pgo-full.tar.zst", - sha256 = "3b9c7d6ed94260b83ed8f44ee9a7b8fce392259ce6591e538601f7353061a884", - ) - else: - fail("unexpected arch") - - policy = dist.make_python_packaging_policy() - - policy.file_scanner_classify_files = True - policy.include_classified_resources = False - - policy.allow_files = True - policy.file_scanner_emit_files = True - policy.include_file_resources = False - - policy.include_distribution_sources = False - policy.include_distribution_resources = False - policy.include_non_distribution_sources = False - policy.include_test = False - - policy.resources_location = "in-memory" - policy.resources_location_fallback = "filesystem-relative:lib" - - policy.register_resource_callback(handle_resource) - - policy.bytecode_optimize_level_zero = False - policy.bytecode_optimize_level_two = True - - python_config = dist.make_python_interpreter_config() - - # detected libs do not need this, but we add extra afterwards - python_config.module_search_paths = ["$ORIGIN/lib"] - python_config.optimization_level = 2 - - python_config.run_command = "import aqt; aqt.run()" - - exe = dist.to_python_executable( - name="anki", - packaging_policy=policy, - config=python_config, - ) - - exe.windows_runtime_dlls_mode = "always" - - # set in main.rs - exe.windows_subsystem = "console" - - resources = exe.read_virtualenv(VARS.get("venv")) - exe.add_python_resources(resources) - - return exe - - -def make_embedded_resources(exe): - return exe.to_embedded_resources() - - -def make_install(exe): - files = FileManifest() - files.add_python_resource(".", exe) - return files - - -register_target("exe", make_exe) -register_target( - "resources", make_embedded_resources, depends=["exe"], default_build_script=True -) -register_target("install", make_install, depends=["exe"], default=True) -resolve_targets() diff --git a/qt/bundle/qt.exclude b/qt/bundle/qt.exclude deleted file mode 100644 index e5a6f252b..000000000 --- a/qt/bundle/qt.exclude +++ /dev/null @@ -1,10 +0,0 @@ -qml -bindings -uic -lupdate 
-qsci -*.pyc -*.pyi -*.sip -py.typed -__pycache__ diff --git a/qt/bundle/src/anki.rs b/qt/bundle/src/anki.rs deleted file mode 100644 index 3cf960028..000000000 --- a/qt/bundle/src/anki.rs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -pub(super) fn init() { - #[cfg(target_os = "windows")] - attach_console(); - - println!("Anki starting..."); -} - -/// If parent process has a console (eg cmd.exe), redirect our output there. -#[cfg(target_os = "windows")] -fn attach_console() { - use std::ffi::CString; - - use libc_stdhandle::*; - use winapi::um::wincon; - - let console_attached = unsafe { wincon::AttachConsole(wincon::ATTACH_PARENT_PROCESS) }; - if console_attached == 0 { - return; - } - - let conin = CString::new("CONIN$").unwrap(); - let conout = CString::new("CONOUT$").unwrap(); - let r = CString::new("r").unwrap(); - let w = CString::new("w").unwrap(); - - // Python uses the CRT for I/O, and it requires the descriptors are reopened. - unsafe { - libc::freopen(conin.as_ptr(), r.as_ptr(), stdin()); - libc::freopen(conout.as_ptr(), w.as_ptr(), stdout()); - libc::freopen(conout.as_ptr(), w.as_ptr(), stderr()); - } -} diff --git a/qt/bundle/src/main.rs b/qt/bundle/src/main.rs deleted file mode 100644 index d424d4f26..000000000 --- a/qt/bundle/src/main.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Based off PyOxidizer's 'init-rust-project'. -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v. 2.0. If a copy of the MPL was not distributed with this -// file, You can obtain one at https://mozilla.org/MPL/2.0/. - -#![windows_subsystem = "windows"] - -mod anki; - -use pyembed::{MainPythonInterpreter, OxidizedPythonInterpreterConfig}; - -#[cfg(feature = "global-allocator-jemalloc")] -#[global_allocator] -static GLOBAL: jemallocator::Jemalloc = jemallocator::Jemalloc; - -include!(env!("DEFAULT_PYTHON_CONFIG_RS")); - -fn main() { - anki::init(); - - let exit_code = { - let config: OxidizedPythonInterpreterConfig = default_python_config(); - match MainPythonInterpreter::new(config) { - Ok(interp) => interp.run(), - Err(msg) => { - eprintln!("error instantiating embedded Python interpreter: {}", msg); - 1 - } - } - }; - std::process::exit(exit_code); -} diff --git a/qt/bundle/win/Cargo.toml b/qt/bundle/win/Cargo.toml deleted file mode 100644 index 9c091b55f..000000000 --- a/qt/bundle/win/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "makeexe" -version.workspace = true -authors.workspace = true -edition.workspace = true -license.workspace = true -publish = false -rust-version.workspace = true - -[dependencies] -anyhow.workspace = true -camino.workspace = true -clap.workspace = true -tugger-windows-codesign.workspace = true -walkdir.workspace = true diff --git a/qt/bundle/win/anki-console.bat b/qt/bundle/win/anki-console.bat deleted file mode 100644 index a565fa7b6..000000000 --- a/qt/bundle/win/anki-console.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -"%~dp0"\anki %* -pause - - diff --git a/qt/bundle/win/anki.exe.manifest b/qt/bundle/win/anki.exe.manifest deleted file mode 100644 index 6abbdfea9..000000000 --- a/qt/bundle/win/anki.exe.manifest +++ /dev/null @@ -1,9 +0,0 @@ - - - - - true - UTF-8 - - - diff --git a/qt/bundle/win/src/main.rs b/qt/bundle/win/src/main.rs deleted file mode 100644 index 2091f6a51..000000000 --- a/qt/bundle/win/src/main.rs +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: 
GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -use std::fs; -use std::io::prelude::*; -use std::path::Path; -use std::process::Command; - -use anyhow::bail; -use anyhow::Context; -use anyhow::Result; -use camino::Utf8Path; -use camino::Utf8PathBuf; -use clap::Parser; -use tugger_windows_codesign::CodeSigningCertificate; -use tugger_windows_codesign::SigntoolSign; -use tugger_windows_codesign::SystemStore; -use tugger_windows_codesign::TimestampServer; -use walkdir::WalkDir; - -#[derive(Parser)] -struct Args { - version: String, - bundle_root: Utf8PathBuf, - qt6_setup_path: Utf8PathBuf, -} - -fn main() -> Result<()> { - let args = Args::parse(); - - let src_win_folder = Utf8Path::new("qt/bundle/win"); - let std_dist_folder = args.bundle_root.join("std"); - // folder->installer - let dists = [(&std_dist_folder, &args.qt6_setup_path)]; - - for (folder, _) in dists { - fs::copy( - src_win_folder.join("anki-console.bat"), - folder.join("anki-console.bat"), - ) - .context("anki-console")?; - } - - println!("--- Build uninstaller"); - build_installer( - &args.bundle_root, - &std_dist_folder, - &args.qt6_setup_path, - &args.version, - true, - ) - .context("uninstaller")?; - - // sign the anki.exe and uninstaller.exe in std - println!("--- Sign binaries"); - codesign([ - &std_dist_folder.join("anki.exe"), - &std_dist_folder.join("uninstall.exe"), - ])?; - - println!("--- Build manifest"); - for (folder, _) in dists { - build_manifest(folder).context("manifest")?; - } - - for (folder, installer) in dists { - println!("--- Build {}", installer); - build_installer(&args.bundle_root, folder, installer, &args.version, false)?; - } - - println!("--- Sign installers"); - codesign(dists.iter().map(|tup| tup.1))?; - - Ok(()) -} - -fn build_installer( - bundle_root: &Utf8Path, - dist_folder: &Utf8Path, - installer: &Utf8Path, - version: &str, - uninstaller: bool, -) -> Result<()> { - let rendered_nsi = include_str!("../anki.template.nsi") - .replace("@@SRC@@", dist_folder.as_str()) - .replace("@@INSTALLER@@", installer.as_str()) - .replace("@@VERSION@@", version); - let rendered_nsi_path = bundle_root.join("anki.nsi"); - fs::write(&rendered_nsi_path, rendered_nsi).context("anki.nsi")?; - fs::write( - bundle_root.join("fileassoc.nsh"), - include_str!("../fileassoc.nsh"), - )?; - fs::copy( - "out/extracted/nsis_plugins/nsProcess.dll", - bundle_root.join("nsProcess.dll"), - )?; - let mut cmd = Command::new("c:/program files (x86)/nsis/makensis.exe"); - cmd.arg("-V3"); - if uninstaller { - cmd.arg("-DWRITE_UNINSTALLER"); - }; - if option_env!("RELEASE").is_none() { - cmd.arg("-DNO_COMPRESS"); - } - cmd.arg(rendered_nsi_path); - let status = cmd.status()?; - if !status.success() { - bail!("makensis failed"); - } - Ok(()) -} - -fn codesign(paths: impl IntoIterator>) -> Result<()> { - if option_env!("ANKI_CODESIGN").is_none() { - return Ok(()); - } - let cert = CodeSigningCertificate::Sha1Thumbprint( - SystemStore::My, - "dccfc6d312fc0432197bb7be951478e5866eebf8".into(), - ); - let mut sign = SigntoolSign::new(cert); - sign.file_digest_algorithm("sha256") - .timestamp_server(TimestampServer::Rfc3161( - "http://time.certum.pl".into(), - "sha256".into(), - )) - .verbose(); - paths.into_iter().for_each(|path| { - sign.sign_file(path); - }); - sign.run() -} - -fn build_manifest(base_path: &Utf8Path) -> Result<()> { - let mut buf = vec![]; - for entry in WalkDir::new(base_path) - .min_depth(1) - .sort_by_file_name() - .into_iter() - { - let entry = entry?; - let path = entry.path(); - let 
relative_path = path.strip_prefix(base_path)?; - write!( - &mut buf, - "{}\r\n", - relative_path.to_str().context("relative_path utf8")? - )?; - } - fs::write(base_path.join("anki.install-manifest"), buf)?; - Ok(()) -} diff --git a/qt/hatch_build.py b/qt/hatch_build.py new file mode 100644 index 000000000..aaf345842 --- /dev/null +++ b/qt/hatch_build.py @@ -0,0 +1,82 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +import os +import sys +from pathlib import Path +from typing import Any, Dict + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomBuildHook(BuildHookInterface): + """Build hook to copy generated files into both sdist and wheel.""" + + PLUGIN_NAME = "custom" + + def initialize(self, version: str, build_data: Dict[str, Any]) -> None: + """Initialize the build hook.""" + force_include = build_data.setdefault("force_include", {}) + + # Pin anki== + self._set_anki_dependency(version, build_data) + + # Look for generated files in out/qt/_aqt + project_root = Path(self.root).parent + generated_root = project_root / "out" / "qt" / "_aqt" + + if not os.environ.get("ANKI_WHEEL_TAG"): + # On Windows, uv invokes this build hook during the initial uv sync, + # when the tag has not been declared by our build script. + return + + assert generated_root.exists(), "you should build with --wheel" + self._add_aqt_files(force_include, generated_root) + + def _set_anki_dependency(self, version: str, build_data: Dict[str, Any]) -> None: + # Get current dependencies and replace 'anki' with exact version + dependencies = build_data.setdefault("dependencies", []) + + # Remove any existing anki dependency + dependencies[:] = [dep for dep in dependencies if not dep.startswith("anki")] + + # Handle version detection + actual_version = version + if version == "standard": + # Read actual version from .version file + project_root = Path(self.root).parent + version_file = project_root / ".version" + if version_file.exists(): + actual_version = version_file.read_text().strip() + + # Only add exact version for real releases, not editable installs + if actual_version != "editable": + dependencies.append(f"anki=={actual_version}") + else: + # For editable installs, just add anki without version constraint + dependencies.append("anki") + + def _add_aqt_files(self, force_include: Dict[str, str], aqt_root: Path) -> None: + """Add _aqt files to the build.""" + for path in aqt_root.rglob("*"): + if path.is_file() and not self._should_exclude(path): + relative_path = path.relative_to(aqt_root) + # Place files under _aqt/ in the distribution + dist_path = "_aqt" / relative_path + force_include[str(path)] = str(dist_path) + + def _should_exclude(self, path: Path) -> bool: + """Check if a file should be excluded from the wheel.""" + path_str = str(path) + + # Exclude __pycache__ + if "/__pycache__/" in path_str: + return True + + if path.suffix in [".ui", ".scss", ".map", ".ts"]: + return True + if path.name.startswith("tsconfig"): + return True + if "/aqt/data" in path_str: + return True + return False diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml new file mode 100644 index 000000000..32fb15991 --- /dev/null +++ b/qt/launcher/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "launcher" +version = "1.0.0" +authors.workspace = true +edition.workspace = true +license.workspace = true +publish = false +rust-version.workspace = true + +[dependencies] +anki_io.workspace = true 
+anki_process.workspace = true +anyhow.workspace = true +camino.workspace = true +dirs.workspace = true + +[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies] +libc.workspace = true + +[target.'cfg(windows)'.dependencies] +windows.workspace = true +widestring.workspace = true +libc.workspace = true +libc-stdhandle.workspace = true + +[[bin]] +name = "build_win" +path = "src/bin/build_win.rs" + +[[bin]] +name = "anki-console" +path = "src/bin/anki_console.rs" + +[target.'cfg(windows)'.build-dependencies] +embed-resource.workspace = true diff --git a/qt/launcher/addon/__init__.py b/qt/launcher/addon/__init__.py new file mode 100644 index 000000000..63a2cc5a9 --- /dev/null +++ b/qt/launcher/addon/__init__.py @@ -0,0 +1,192 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +from __future__ import annotations + +import contextlib +import os +import subprocess +import sys +from pathlib import Path +from typing import Any + +from anki.utils import pointVersion +from aqt import mw +from aqt.qt import QAction +from aqt.utils import askUser, is_mac, is_win, showInfo + + +def launcher_executable() -> str | None: + """Return the path to the Anki launcher executable.""" + return os.getenv("ANKI_LAUNCHER") + + +def uv_binary() -> str | None: + """Return the path to the uv binary.""" + return os.environ.get("ANKI_LAUNCHER_UV") + + +def launcher_root() -> str | None: + """Return the path to the launcher root directory (AnkiProgramFiles).""" + return os.environ.get("UV_PROJECT") + + +def venv_binary(cmd: str) -> str | None: + """Return the path to a binary in the launcher's venv.""" + root = launcher_root() + if not root: + return None + + root_path = Path(root) + if is_win: + binary_path = root_path / ".venv" / "Scripts" / cmd + else: + binary_path = root_path / ".venv" / "bin" / cmd + + return str(binary_path) + + +def add_python_requirements(reqs: list[str]) -> tuple[bool, str]: + """Add Python requirements to the launcher venv using uv add. 
+ + Returns (success, output)""" + + binary = uv_binary() + if not binary: + return (False, "Not in packaged build.") + + uv_cmd = [binary, "add"] + reqs + result = subprocess.run(uv_cmd, capture_output=True, text=True, check=False) + + if result.returncode == 0: + root = launcher_root() + if root: + sync_marker = Path(root) / ".sync_complete" + sync_marker.touch() + return (True, result.stdout) + else: + return (False, result.stderr) + + +def trigger_launcher_run() -> None: + """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" + try: + root = launcher_root() + if not root: + return + + pyproject_path = Path(root) / "pyproject.toml" + + if pyproject_path.exists(): + # Touch the file to update its mtime + pyproject_path.touch() + except Exception as e: + print(e) + + +def update_and_restart() -> None: + """Update and restart Anki using the launcher.""" + launcher = launcher_executable() + assert launcher + + trigger_launcher_run() + + with contextlib.suppress(ResourceWarning): + env = os.environ.copy() + creationflags = 0 + if sys.platform == "win32": + creationflags = ( + subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS + ) + subprocess.Popen( + [launcher], + start_new_session=True, + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + creationflags=creationflags, + ) + + mw.app.quit() + + +def confirm_then_upgrade(): + if not askUser("Change to a different Anki version?"): + return + update_and_restart() + + +# return modified command array that points to bundled command, and return +# required environment +def _packagedCmd(cmd: list[str]) -> tuple[Any, dict[str, str]]: + cmd = cmd[:] + env = os.environ.copy() + # keep LD_LIBRARY_PATH when in snap environment + if "LD_LIBRARY_PATH" in env and "SNAP" not in env: + del env["LD_LIBRARY_PATH"] + + # Try to find binary in anki-audio package for Windows/Mac + if is_win or is_mac: + try: + import anki_audio + + audio_pkg_path = Path(anki_audio.__file__).parent + if is_win: + packaged_path = audio_pkg_path / (cmd[0] + ".exe") + else: # is_mac + packaged_path = audio_pkg_path / cmd[0] + + if packaged_path.exists(): + cmd[0] = str(packaged_path) + return cmd, env + except ImportError: + # anki-audio not available, fall back to old behavior + pass + + packaged_path = Path(sys.prefix) / cmd[0] + if packaged_path.exists(): + cmd[0] = str(packaged_path) + + return cmd, env + + +def on_addon_config(): + showInfo( + "This add-on is automatically added when installing older Anki versions, so that they work with the launcher. You can remove it if you wish." 
+ ) + + +def setup(): + mw.addonManager.setConfigAction(__name__, on_addon_config) + + if pointVersion() >= 250600: + return + if not launcher_executable(): + return + + # Add action to tools menu + action = QAction("Upgrade/Downgrade", mw) + action.triggered.connect(confirm_then_upgrade) + mw.form.menuTools.addAction(action) + + # Monkey-patch audio tools to use anki-audio + if is_win or is_mac: + import aqt + import aqt.sound + + aqt.sound._packagedCmd = _packagedCmd + + # Inject launcher functions into launcher module + import aqt.package + + aqt.package.launcher_executable = launcher_executable + aqt.package.update_and_restart = update_and_restart + aqt.package.trigger_launcher_run = trigger_launcher_run + aqt.package.uv_binary = uv_binary + aqt.package.launcher_root = launcher_root + aqt.package.venv_binary = venv_binary + aqt.package.add_python_requirements = add_python_requirements + + +setup() diff --git a/qt/launcher/addon/manifest.json b/qt/launcher/addon/manifest.json new file mode 100644 index 000000000..b4f08e70d --- /dev/null +++ b/qt/launcher/addon/manifest.json @@ -0,0 +1,6 @@ +{ + "name": "Anki Launcher", + "package": "anki-launcher", + "min_point_version": 50, + "max_point_version": 250600 +} diff --git a/qt/launcher/build.rs b/qt/launcher/build.rs new file mode 100644 index 000000000..3ba75b0e1 --- /dev/null +++ b/qt/launcher/build.rs @@ -0,0 +1,10 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +fn main() { + #[cfg(windows)] + { + embed_resource::compile("win/anki-manifest.rc", embed_resource::NONE) + .manifest_required() + .unwrap(); + } +} diff --git a/qt/bundle/lin/README.md b/qt/launcher/lin/README.md similarity index 100% rename from qt/bundle/lin/README.md rename to qt/launcher/lin/README.md diff --git a/qt/launcher/lin/anki b/qt/launcher/lin/anki new file mode 100644 index 000000000..2a4a46062 --- /dev/null +++ b/qt/launcher/lin/anki @@ -0,0 +1,30 @@ +#!/bin/bash +# Universal Anki launcher script + +# Get the directory where this script is located (resolve symlinks) +SCRIPT_DIR="$(cd "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" && pwd)" + +# Determine architecture +ARCH=$(uname -m) +case "$ARCH" in + x86_64|amd64) + LAUNCHER="$SCRIPT_DIR/launcher.amd64" + ;; + aarch64|arm64) + LAUNCHER="$SCRIPT_DIR/launcher.arm64" + ;; + *) + echo "Error: Unsupported architecture: $ARCH" + echo "Supported architectures: x86_64, aarch64" + exit 1 + ;; +esac + +# Check if launcher exists +if [ ! 
-f "$LAUNCHER" ]; then + echo "Error: Launcher not found: $LAUNCHER" + exit 1 +fi + +# Execute the appropriate launcher with all arguments +exec "$LAUNCHER" "$@" \ No newline at end of file diff --git a/qt/bundle/lin/anki.1 b/qt/launcher/lin/anki.1 similarity index 100% rename from qt/bundle/lin/anki.1 rename to qt/launcher/lin/anki.1 diff --git a/qt/bundle/lin/anki.desktop b/qt/launcher/lin/anki.desktop similarity index 100% rename from qt/bundle/lin/anki.desktop rename to qt/launcher/lin/anki.desktop diff --git a/qt/bundle/lin/anki.png b/qt/launcher/lin/anki.png similarity index 100% rename from qt/bundle/lin/anki.png rename to qt/launcher/lin/anki.png diff --git a/qt/bundle/lin/anki.xml b/qt/launcher/lin/anki.xml similarity index 100% rename from qt/bundle/lin/anki.xml rename to qt/launcher/lin/anki.xml diff --git a/qt/bundle/lin/anki.xpm b/qt/launcher/lin/anki.xpm similarity index 100% rename from qt/bundle/lin/anki.xpm rename to qt/launcher/lin/anki.xpm diff --git a/qt/launcher/lin/build.sh b/qt/launcher/lin/build.sh new file mode 100755 index 000000000..de96a1b50 --- /dev/null +++ b/qt/launcher/lin/build.sh @@ -0,0 +1,87 @@ +#!/bin/bash +# +# This script currently only supports universal builds on x86_64. +# + +set -e + +# Add Linux cross-compilation target +rustup target add aarch64-unknown-linux-gnu +# Detect host architecture +HOST_ARCH=$(uname -m) + + +# Define output paths +OUTPUT_DIR="../../../out/launcher" +LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher" + +# Clean existing output directory +rm -rf "$LAUNCHER_DIR" + +# Build binaries based on host architecture +if [ "$HOST_ARCH" = "aarch64" ]; then + # On aarch64 host, only build for aarch64 + cargo build -p launcher --release --target aarch64-unknown-linux-gnu +else + # On other hosts, build for both architectures + cargo build -p launcher --release --target x86_64-unknown-linux-gnu + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ + cargo build -p launcher --release --target aarch64-unknown-linux-gnu + # Extract uv_lin_arm for cross-compilation + (cd ../../.. 
&& ./ninja extract:uv_lin_arm) +fi + +# Create output directory +mkdir -p "$LAUNCHER_DIR" + +# Copy binaries and support files +TARGET_DIR=${CARGO_TARGET_DIR:-../../../target} + +# Copy binaries with architecture suffixes +if [ "$HOST_ARCH" = "aarch64" ]; then + # On aarch64 host, copy arm64 binary to both locations + cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.amd64" + cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.arm64" + # Copy uv binary to both locations + cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.amd64" + cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.arm64" +else + # On other hosts, copy architecture-specific binaries + cp "$TARGET_DIR/x86_64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.amd64" + cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.arm64" + cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.amd64" + cp "../../../out/extracted/uv_lin_arm/uv" "$LAUNCHER_DIR/uv.arm64" +fi + +# Copy support files from lin directory +for file in README.md anki.1 anki.desktop anki.png anki.xml anki.xpm install.sh uninstall.sh anki; do + cp "$file" "$LAUNCHER_DIR/" +done + +# Copy additional files from parent directory +cp ../pyproject.toml "$LAUNCHER_DIR/" +cp ../../../.python-version "$LAUNCHER_DIR/" + +# Set executable permissions +chmod +x \ + "$LAUNCHER_DIR/anki" \ + "$LAUNCHER_DIR/launcher.amd64" \ + "$LAUNCHER_DIR/launcher.arm64" \ + "$LAUNCHER_DIR/uv.amd64" \ + "$LAUNCHER_DIR/uv.arm64" \ + "$LAUNCHER_DIR/install.sh" \ + "$LAUNCHER_DIR/uninstall.sh" + +# Set proper permissions and create tarball +chmod -R a+r "$LAUNCHER_DIR" + +# Create tarball using the same options as the Rust template +ZSTD="zstd -c --long -T0 -18" +TRANSFORM="s%^.%anki-launcher%S" +TARBALL="$OUTPUT_DIR/anki-launcher.tar.zst" + +tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" . 
+ +echo "Build complete:" +echo "Universal launcher: $LAUNCHER_DIR" +echo "Tarball: $TARBALL" diff --git a/qt/bundle/lin/install.sh b/qt/launcher/lin/install.sh similarity index 92% rename from qt/bundle/lin/install.sh rename to qt/launcher/lin/install.sh index 519a45fa2..c9f129654 100755 --- a/qt/bundle/lin/install.sh +++ b/qt/launcher/lin/install.sh @@ -13,7 +13,7 @@ fi rm -rf "$PREFIX"/share/anki "$PREFIX"/bin/anki mkdir -p "$PREFIX"/share/anki -cp -av --no-preserve=owner,context -- * "$PREFIX"/share/anki/ +cp -av --no-preserve=owner,context -- * .python-version "$PREFIX"/share/anki/ mkdir -p "$PREFIX"/bin ln -sf "$PREFIX"/share/anki/anki "$PREFIX"/bin/anki # fix a previous packaging issue where we created this as a file diff --git a/qt/bundle/lin/uninstall.sh b/qt/launcher/lin/uninstall.sh similarity index 100% rename from qt/bundle/lin/uninstall.sh rename to qt/launcher/lin/uninstall.sh diff --git a/qt/bundle/mac/src/Info.plist b/qt/launcher/mac/Info.plist similarity index 87% rename from qt/bundle/mac/src/Info.plist rename to qt/launcher/mac/Info.plist index 5933838e4..ac0ab2f09 100644 --- a/qt/bundle/mac/src/Info.plist +++ b/qt/launcher/mac/Info.plist @@ -5,9 +5,11 @@ CFBundleDisplayName Anki CFBundleShortVersionString - 2.1.46 + 1.0 LSMinimumSystemVersion - 10.14.0 + 12 + LSApplicationCategoryType + public.app-category.education CFBundleDocumentTypes @@ -26,11 +28,11 @@ CFBundleExecutable - anki + launcher CFBundleIconName AppIcon CFBundleIdentifier - net.ankiweb.dtop + net.ankiweb.launcher CFBundleInfoDictionaryVersion 6.0 CFBundleName diff --git a/qt/launcher/mac/build.sh b/qt/launcher/mac/build.sh new file mode 100755 index 000000000..0ec39ad8f --- /dev/null +++ b/qt/launcher/mac/build.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +set -e + +# Define output path +OUTPUT_DIR="../../../out/launcher" +APP_LAUNCHER="$OUTPUT_DIR/Anki.app" +rm -rf "$APP_LAUNCHER" + +# Build binaries for both architectures +rustup target add aarch64-apple-darwin x86_64-apple-darwin +cargo build -p launcher --release --target aarch64-apple-darwin +cargo build -p launcher --release --target x86_64-apple-darwin +(cd ../../.. 
&& ./ninja launcher:uv_universal) + +# Ensure output directory exists +mkdir -p "$OUTPUT_DIR" + +# Remove existing app launcher +rm -rf "$APP_LAUNCHER" + +# Create app launcher structure +mkdir -p "$APP_LAUNCHER/Contents/MacOS" "$APP_LAUNCHER/Contents/Resources" + +# Copy binaries in +TARGET_DIR=${CARGO_TARGET_DIR:-target} +lipo -create \ + "$TARGET_DIR/aarch64-apple-darwin/release/launcher" \ + "$TARGET_DIR/x86_64-apple-darwin/release/launcher" \ + -output "$APP_LAUNCHER/Contents/MacOS/launcher" +cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/" + +# Copy support files +cp Info.plist "$APP_LAUNCHER/Contents/" +cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/" +cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/" +cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/" + +# Codesign +for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do + codesign --force -vvvv -o runtime -s "Developer ID Application:" \ + --entitlements entitlements.python.xml \ + "$i" +done + +# Check +codesign -vvv "$APP_LAUNCHER" +spctl -a "$APP_LAUNCHER" + +# Notarize and bundle (skip if NODMG is set) +if [ -z "$NODMG" ]; then + ./notarize.sh "$OUTPUT_DIR" + ./dmg/build.sh "$OUTPUT_DIR" +fi \ No newline at end of file diff --git a/qt/bundle/mac/dmg/anki-logo-bg.png b/qt/launcher/mac/dmg/anki-logo-bg.png similarity index 100% rename from qt/bundle/mac/dmg/anki-logo-bg.png rename to qt/launcher/mac/dmg/anki-logo-bg.png diff --git a/qt/bundle/mac/dmg/build.sh b/qt/launcher/mac/dmg/build.sh similarity index 87% rename from qt/bundle/mac/dmg/build.sh rename to qt/launcher/mac/dmg/build.sh index 6efb510a7..16b48c06a 100755 --- a/qt/bundle/mac/dmg/build.sh +++ b/qt/launcher/mac/dmg/build.sh @@ -4,9 +4,9 @@ set -e # base folder with Anki.app in it -dist=$1 -dmg_path=$2 -script_folder=$(dirname $0) +output="$1" +dist="$1/tmp" +dmg_path="$output/Anki.dmg" if [ -d "/Volumes/Anki" ] then @@ -14,9 +14,14 @@ then exit 1 fi +rm -rf $dist $dmg_path +mkdir -p $dist +rsync -av $output/Anki.app $dist/ +script_folder=$(dirname $0) + echo "bundling..." 
ln -s /Applications $dist/Applications -mkdir $dist/.background +mkdir -p $dist/.background cp ${script_folder}/anki-logo-bg.png $dist/.background cp ${script_folder}/dmg_ds_store $dist/.DS_Store diff --git a/qt/bundle/mac/dmg/dmg_ds_store b/qt/launcher/mac/dmg/dmg_ds_store similarity index 100% rename from qt/bundle/mac/dmg/dmg_ds_store rename to qt/launcher/mac/dmg/dmg_ds_store diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Info.plist b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Info.plist similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Info.plist rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Info.plist diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/MacOS/applet b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/MacOS/applet similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/MacOS/applet rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/MacOS/applet diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/PkgInfo b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/PkgInfo similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/PkgInfo rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/PkgInfo diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/Scripts/main.scpt b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/Scripts/main.scpt similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/Scripts/main.scpt rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/Scripts/main.scpt diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.icns b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.icns similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.icns rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.icns diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.rsrc b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.rsrc similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.rsrc rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/applet.rsrc diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/description.rtfd/TXT.rtf b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/description.rtfd/TXT.rtf similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/Resources/description.rtfd/TXT.rtf rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/Resources/description.rtfd/TXT.rtf diff --git a/qt/bundle/mac/dmg/set-dmg-settings.app/Contents/_CodeSignature/CodeResources b/qt/launcher/mac/dmg/set-dmg-settings.app/Contents/_CodeSignature/CodeResources similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.app/Contents/_CodeSignature/CodeResources rename to qt/launcher/mac/dmg/set-dmg-settings.app/Contents/_CodeSignature/CodeResources diff --git a/qt/bundle/mac/dmg/set-dmg-settings.scpt b/qt/launcher/mac/dmg/set-dmg-settings.scpt similarity index 100% rename from qt/bundle/mac/dmg/set-dmg-settings.scpt rename to qt/launcher/mac/dmg/set-dmg-settings.scpt diff --git a/qt/bundle/mac/entitlements.python.xml b/qt/launcher/mac/entitlements.python.xml similarity index 100% rename from qt/bundle/mac/entitlements.python.xml rename to qt/launcher/mac/entitlements.python.xml diff --git 
a/qt/bundle/mac/icon/Assets.car b/qt/launcher/mac/icon/Assets.car similarity index 100% rename from qt/bundle/mac/icon/Assets.car rename to qt/launcher/mac/icon/Assets.car diff --git a/qt/bundle/mac/icon/Assets.xcassets/AppIcon.appiconset/Contents.json b/qt/launcher/mac/icon/Assets.xcassets/AppIcon.appiconset/Contents.json similarity index 100% rename from qt/bundle/mac/icon/Assets.xcassets/AppIcon.appiconset/Contents.json rename to qt/launcher/mac/icon/Assets.xcassets/AppIcon.appiconset/Contents.json diff --git a/qt/bundle/mac/icon/Assets.xcassets/AppIcon.appiconset/round-1024-512.png b/qt/launcher/mac/icon/Assets.xcassets/AppIcon.appiconset/round-1024-512.png similarity index 100% rename from qt/bundle/mac/icon/Assets.xcassets/AppIcon.appiconset/round-1024-512.png rename to qt/launcher/mac/icon/Assets.xcassets/AppIcon.appiconset/round-1024-512.png diff --git a/qt/bundle/mac/icon/Assets.xcassets/Contents.json b/qt/launcher/mac/icon/Assets.xcassets/Contents.json similarity index 100% rename from qt/bundle/mac/icon/Assets.xcassets/Contents.json rename to qt/launcher/mac/icon/Assets.xcassets/Contents.json diff --git a/qt/bundle/mac/icon/build.sh b/qt/launcher/mac/icon/build.sh similarity index 100% rename from qt/bundle/mac/icon/build.sh rename to qt/launcher/mac/icon/build.sh diff --git a/qt/launcher/mac/notarize.sh b/qt/launcher/mac/notarize.sh new file mode 100755 index 000000000..d906bd7f8 --- /dev/null +++ b/qt/launcher/mac/notarize.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -e + +# Define output path +OUTPUT_DIR="$1" +APP_LAUNCHER="$OUTPUT_DIR/Anki.app" +ZIP_FILE="$OUTPUT_DIR/Anki.zip" + +# Create zip for notarization +(cd "$OUTPUT_DIR" && rm -rf Anki.zip && zip -r Anki.zip Anki.app) + +# Upload for notarization +xcrun notarytool submit "$ZIP_FILE" -p default --wait + +# Staple the app +xcrun stapler staple "$APP_LAUNCHER" \ No newline at end of file diff --git a/qt/launcher/pyproject.toml b/qt/launcher/pyproject.toml new file mode 100644 index 000000000..cc521b432 --- /dev/null +++ b/qt/launcher/pyproject.toml @@ -0,0 +1,8 @@ +[project] +name = "anki-launcher" +version = "1.0.0" +description = "UV-based launcher for Anki." 
+requires-python = ">=3.9" +dependencies = [ + "anki-release", +] diff --git a/qt/launcher/src/bin/anki_console.rs b/qt/launcher/src/bin/anki_console.rs new file mode 100644 index 000000000..181db920f --- /dev/null +++ b/qt/launcher/src/bin/anki_console.rs @@ -0,0 +1,58 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#![windows_subsystem = "console"] + +use std::env; +use std::io::stdin; +use std::process::Command; + +use anyhow::Context; +use anyhow::Result; + +fn main() { + if let Err(e) = run() { + eprintln!("Error: {e:#}"); + std::process::exit(1); + } +} + +fn run() -> Result<()> { + let current_exe = env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let anki_exe = exe_dir.join("anki.exe"); + + if !anki_exe.exists() { + anyhow::bail!("anki.exe not found in the same directory"); + } + + // Forward all command line arguments to anki.exe + let args: Vec<String> = env::args().skip(1).collect(); + + let mut cmd = Command::new(&anki_exe); + cmd.args(&args); + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_err() { + // if directly invoked by the user, signal the launcher that the + // user wants a Python console + std::env::set_var("ANKI_CONSOLE", "1"); + } + + // Wait for the process to complete and forward its exit code + let status = cmd.status().context("Failed to execute anki.exe")?; + if !status.success() { + println!("\nPress enter to close."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } + + if let Some(code) = status.code() { + std::process::exit(code); + } else { + // Process was terminated by a signal + std::process::exit(1); + } +} diff --git a/qt/launcher/src/bin/build_win.rs b/qt/launcher/src/bin/build_win.rs new file mode 100644 index 000000000..fc9082bf2 --- /dev/null +++ b/qt/launcher/src/bin/build_win.rs @@ -0,0 +1,302 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::env; +use std::path::Path; +use std::path::PathBuf; +use std::process::Command; + +use anki_io::copy_file; +use anki_io::create_dir_all; +use anki_io::remove_dir_all; +use anki_io::write_file; +use anki_process::CommandExt; +use anyhow::Result; + +const OUTPUT_DIR: &str = "../../../out/launcher"; +const LAUNCHER_EXE_DIR: &str = "../../../out/launcher_exe"; +const NSIS_DIR: &str = "../../../out/nsis"; +const CARGO_TARGET_DIR: &str = "../../../out/rust"; +const NSIS_PATH: &str = "C:\\Program Files (x86)\\NSIS\\makensis.exe"; + +fn main() -> Result<()> { + println!("Building Windows launcher..."); + + let output_dir = PathBuf::from(OUTPUT_DIR); + let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR); + let nsis_dir = PathBuf::from(NSIS_DIR); + + setup_directories(&output_dir, &launcher_exe_dir, &nsis_dir)?; + build_launcher_binary()?; + extract_nsis_plugins()?; + copy_files(&output_dir)?; + sign_binaries(&output_dir)?; + copy_nsis_files(&nsis_dir)?; + build_uninstaller(&output_dir, &nsis_dir)?; + sign_file(&output_dir.join("uninstall.exe"))?; + generate_install_manifest(&output_dir)?; + build_installer(&output_dir, &nsis_dir)?; + sign_file(&PathBuf::from("../../../out/launcher_exe/anki-install.exe"))?; + + println!("Build completed successfully!"); + println!("Output directory: {}", output_dir.display()); + println!("Installer: ../../../out/launcher_exe/anki-install.exe"); + + Ok(()) +} + +fn
setup_directories(output_dir: &Path, launcher_exe_dir: &Path, nsis_dir: &Path) -> Result<()> { + println!("Setting up directories..."); + + // Remove existing output directories + if output_dir.exists() { + remove_dir_all(output_dir)?; + } + if launcher_exe_dir.exists() { + remove_dir_all(launcher_exe_dir)?; + } + if nsis_dir.exists() { + remove_dir_all(nsis_dir)?; + } + + // Create output directories + create_dir_all(output_dir)?; + create_dir_all(launcher_exe_dir)?; + create_dir_all(nsis_dir)?; + + Ok(()) +} + +fn build_launcher_binary() -> Result<()> { + println!("Building launcher binary..."); + + env::set_var("CARGO_TARGET_DIR", CARGO_TARGET_DIR); + + Command::new("cargo") + .args([ + "build", + "-p", + "launcher", + "--release", + "--target", + "x86_64-pc-windows-msvc", + ]) + .ensure_success()?; + + Ok(()) +} + +fn extract_nsis_plugins() -> Result<()> { + println!("Extracting NSIS plugins..."); + + // Change to the anki root directory and run tools/ninja.bat + Command::new("cmd") + .args([ + "/c", + "cd", + "/d", + "..\\..\\..\\", + "&&", + "tools\\ninja.bat", + "extract:nsis_plugins", + ]) + .ensure_success()?; + + Ok(()) +} + +fn copy_files(output_dir: &Path) -> Result<()> { + println!("Copying binaries..."); + + // Copy launcher binary as anki.exe + let launcher_src = + PathBuf::from(CARGO_TARGET_DIR).join("x86_64-pc-windows-msvc/release/launcher.exe"); + let launcher_dst = output_dir.join("anki.exe"); + copy_file(&launcher_src, &launcher_dst)?; + + // Copy anki-console binary + let console_src = + PathBuf::from(CARGO_TARGET_DIR).join("x86_64-pc-windows-msvc/release/anki-console.exe"); + let console_dst = output_dir.join("anki-console.exe"); + copy_file(&console_src, &console_dst)?; + + // Copy uv.exe and uvw.exe + let uv_src = PathBuf::from("../../../out/extracted/uv/uv.exe"); + let uv_dst = output_dir.join("uv.exe"); + copy_file(&uv_src, &uv_dst)?; + let uv_src = PathBuf::from("../../../out/extracted/uv/uvw.exe"); + let uv_dst = output_dir.join("uvw.exe"); + copy_file(&uv_src, &uv_dst)?; + + println!("Copying support files..."); + + // Copy pyproject.toml + copy_file("../pyproject.toml", output_dir.join("pyproject.toml"))?; + + // Copy .python-version + copy_file( + "../../../.python-version", + output_dir.join(".python-version"), + )?; + + Ok(()) +} + +fn sign_binaries(output_dir: &Path) -> Result<()> { + sign_file(&output_dir.join("anki.exe"))?; + sign_file(&output_dir.join("anki-console.exe"))?; + sign_file(&output_dir.join("uv.exe"))?; + Ok(()) +} + +fn sign_file(file_path: &Path) -> Result<()> { + let codesign = env::var("CODESIGN").unwrap_or_default(); + if codesign != "1" { + println!( + "Skipping code signing for {} (CODESIGN not set to 1)", + file_path.display() + ); + return Ok(()); + } + + let signtool_path = find_signtool()?; + println!("Signing {}...", file_path.display()); + + Command::new(&signtool_path) + .args([ + "sign", + "/sha1", + "dccfc6d312fc0432197bb7be951478e5866eebf8", + "/fd", + "sha256", + "/tr", + "http://time.certum.pl", + "/td", + "sha256", + "/v", + ]) + .arg(file_path) + .ensure_success()?; + + Ok(()) +} + +fn find_signtool() -> Result<PathBuf> { + println!("Locating signtool.exe..."); + + let output = Command::new("where") + .args([ + "/r", + "C:\\Program Files (x86)\\Windows Kits", + "signtool.exe", + ]) + .utf8_output()?; + + // Find signtool.exe with "arm64" in the path (as per original batch logic) + for line in output.stdout.lines() { + if line.contains("\\arm64\\") { + let signtool_path = PathBuf::from(line.trim()); + println!("Using signtool:
{}", signtool_path.display()); + return Ok(signtool_path); + } + } + + anyhow::bail!("Could not find signtool.exe with arm64 architecture"); +} + +fn generate_install_manifest(output_dir: &Path) -> Result<()> { + println!("Generating install manifest..."); + + let mut manifest_content = String::new(); + let entries = anki_io::read_dir_files(output_dir)?; + + for entry in entries { + let entry = entry?; + let path = entry.path(); + if let Some(file_name) = path.file_name() { + let file_name_str = file_name.to_string_lossy(); + // Skip manifest file and uninstaller (can't delete itself) + if file_name_str != "anki.install-manifest" && file_name_str != "uninstall.exe" { + if let Ok(relative_path) = path.strip_prefix(output_dir) { + // Convert to Windows-style backslashes for NSIS + let windows_path = relative_path.display().to_string().replace('/', "\\"); + // Use Windows line endings (\r\n) as expected by NSIS + manifest_content.push_str(&format!("{windows_path}\r\n")); + } + } + } + } + + write_file(output_dir.join("anki.install-manifest"), manifest_content)?; + + Ok(()) +} + +fn copy_nsis_files(nsis_dir: &Path) -> Result<()> { + println!("Copying NSIS support files..."); + + // Copy anki.template.nsi as anki.nsi + copy_file("anki.template.nsi", nsis_dir.join("anki.nsi"))?; + + // Copy fileassoc.nsh + copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?; + + // Copy nsProcess.dll + copy_file( + "../../../out/extracted/nsis_plugins/nsProcess.dll", + nsis_dir.join("nsProcess.dll"), + )?; + + Ok(()) +} + +fn build_uninstaller(output_dir: &Path, nsis_dir: &Path) -> Result<()> { + println!("Building uninstaller..."); + + let mut flags = vec!["-V3", "-DWRITE_UNINSTALLER"]; + if env::var("NO_COMPRESS").unwrap_or_default() == "1" { + println!("NO_COMPRESS=1 detected, disabling compression"); + flags.push("-DNO_COMPRESS"); + } + + run_nsis( + &PathBuf::from("anki.nsi"), + &flags, + nsis_dir, // Run from nsis directory + )?; + + // Copy uninstaller from nsis directory to output directory + copy_file( + nsis_dir.join("uninstall.exe"), + output_dir.join("uninstall.exe"), + )?; + + Ok(()) +} + +fn build_installer(_output_dir: &Path, nsis_dir: &Path) -> Result<()> { + println!("Building installer..."); + + let mut flags = vec!["-V3"]; + if env::var("NO_COMPRESS").unwrap_or_default() == "1" { + println!("NO_COMPRESS=1 detected, disabling compression"); + flags.push("-DNO_COMPRESS"); + } + + run_nsis( + &PathBuf::from("anki.nsi"), + &flags, + nsis_dir, // Run from nsis directory + )?; + + Ok(()) +} + +fn run_nsis(script_path: &Path, flags: &[&str], working_dir: &Path) -> Result<()> { + let mut cmd = Command::new(NSIS_PATH); + cmd.args(flags).arg(script_path).current_dir(working_dir); + + cmd.ensure_success()?; + + Ok(()) +} diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs new file mode 100644 index 000000000..5c06aadf7 --- /dev/null +++ b/qt/launcher/src/main.rs @@ -0,0 +1,728 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#![windows_subsystem = "windows"] + +use std::io::stdin; +use std::io::stdout; +use std::io::Write; +use std::process::Command; +use std::time::SystemTime; +use std::time::UNIX_EPOCH; + +use anki_io::copy_file; +use anki_io::copy_if_newer; +use anki_io::create_dir_all; +use anki_io::modified_time; +use anki_io::read_file; +use anki_io::remove_file; +use anki_io::write_file; +use anki_io::ToUtf8Path; +use anki_process::CommandExt as AnkiCommandExt; +use anyhow::Context; +use 
anyhow::Result; + +use crate::platform::ensure_os_supported; +use crate::platform::ensure_terminal_shown; +use crate::platform::get_exe_and_resources_dirs; +use crate::platform::get_uv_binary_name; +use crate::platform::launch_anki_normally; +use crate::platform::respawn_launcher; + +mod platform; + +struct State { + current_version: Option<String>, + prerelease_marker: std::path::PathBuf, + uv_install_root: std::path::PathBuf, + uv_cache_dir: std::path::PathBuf, + no_cache_marker: std::path::PathBuf, + anki_base_folder: std::path::PathBuf, + uv_path: std::path::PathBuf, + uv_python_install_dir: std::path::PathBuf, + user_pyproject_path: std::path::PathBuf, + user_python_version_path: std::path::PathBuf, + dist_pyproject_path: std::path::PathBuf, + dist_python_version_path: std::path::PathBuf, + uv_lock_path: std::path::PathBuf, + sync_complete_marker: std::path::PathBuf, + previous_version: Option<String>, +} + +#[derive(Debug, Clone)] +pub enum VersionKind { + PyOxidizer(String), + Uv(String), +} + +#[derive(Debug, Clone)] +pub enum MainMenuChoice { + Latest, + KeepExisting, + Version(VersionKind), + ToggleBetas, + ToggleCache, + Uninstall, + Quit, +} + +fn main() { + if let Err(e) = run() { + eprintln!("Error: {e:#}"); + eprintln!("Press enter to close..."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + + std::process::exit(1); + } +} + +fn run() -> Result<()> { + let uv_install_root = dirs::data_local_dir() + .context("Unable to determine data_dir")? + .join("AnkiProgramFiles"); + + let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?; + + let mut state = State { + current_version: None, + prerelease_marker: uv_install_root.join("prerelease"), + uv_install_root: uv_install_root.clone(), + uv_cache_dir: uv_install_root.join("cache"), + no_cache_marker: uv_install_root.join("nocache"), + anki_base_folder: get_anki_base_path()?, + uv_path: exe_dir.join(get_uv_binary_name()), + uv_python_install_dir: uv_install_root.join("python"), + user_pyproject_path: uv_install_root.join("pyproject.toml"), + user_python_version_path: uv_install_root.join(".python-version"), + dist_pyproject_path: resources_dir.join("pyproject.toml"), + dist_python_version_path: resources_dir.join(".python-version"), + uv_lock_path: uv_install_root.join("uv.lock"), + sync_complete_marker: uv_install_root.join(".sync_complete"), + previous_version: None, + }; + + // Check for uninstall request from Windows uninstaller + if std::env::var("ANKI_LAUNCHER_UNINSTALL").is_ok() { + ensure_terminal_shown()?; + handle_uninstall(&state)?; + return Ok(()); + } + + // Create install directory and copy project files in + create_dir_all(&state.uv_install_root)?; + + copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?; + copy_if_newer( + &state.dist_python_version_path, + &state.user_python_version_path, + )?; + + let pyproject_has_changed = !state.sync_complete_marker.exists() || { + let pyproject_toml_time = modified_time(&state.user_pyproject_path)?; + let sync_complete_time = modified_time(&state.sync_complete_marker)?; + Ok::<bool, anyhow::Error>(pyproject_toml_time > sync_complete_time) + } + .unwrap_or(true); + + if !pyproject_has_changed { + // If venv is already up to date, launch Anki normally + let args: Vec<String> = std::env::args().skip(1).collect(); + let cmd = build_python_command(&state, &args)?; + launch_anki_normally(cmd)?; + return Ok(()); + } + + // If we weren't in a terminal, respawn ourselves in one + ensure_terminal_shown()?; + + print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top +
println!("\x1B[1mAnki Launcher\x1B[0m\n"); + + ensure_os_supported()?; + + check_versions(&mut state); + + main_menu_loop(&state)?; + + // Write marker file to indicate we've completed the sync process + write_sync_marker(&state.sync_complete_marker)?; + + #[cfg(target_os = "macos")] + { + let cmd = build_python_command(&state, &[])?; + platform::mac::prepare_for_launch_after_update(cmd, &uv_install_root)?; + } + + if cfg!(unix) && !cfg!(target_os = "macos") { + println!("\nPress enter to start Anki."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } else { + // on Windows/macOS, the user needs to close the terminal/console + // currently, but ideas on how we can avoid this would be good! + println!(); + println!("Anki will start shortly."); + println!("\x1B[1mYou can close this window.\x1B[0m\n"); + } + + // respawn the launcher as a disconnected subprocess for normal startup + respawn_launcher()?; + + Ok(()) +} + +fn extract_aqt_version( + uv_path: &std::path::Path, + uv_install_root: &std::path::Path, +) -> Option<String> { + let output = Command::new(uv_path) + .current_dir(uv_install_root) + .args(["pip", "show", "aqt"]) + .output() + .ok()?; + + if !output.status.success() { + return None; + } + + let stdout = String::from_utf8(output.stdout).ok()?; + for line in stdout.lines() { + if let Some(version) = line.strip_prefix("Version: ") { + return Some(version.trim().to_string()); + } + } + None +} + +fn check_versions(state: &mut State) { + // If sync_complete_marker is missing, do nothing + if !state.sync_complete_marker.exists() { + return; + } + + // Determine current version by invoking uv pip show aqt + match extract_aqt_version(&state.uv_path, &state.uv_install_root) { + Some(version) => { + state.current_version = Some(version); + } + None => { + println!("Warning: Could not determine current Anki version"); + } + } + + // Read previous version from "previous-version" file + let previous_version_path = state.uv_install_root.join("previous-version"); + if let Ok(content) = read_file(&previous_version_path) { + if let Ok(version_str) = String::from_utf8(content) { + let version = version_str.trim().to_string(); + if !version.is_empty() { + state.previous_version = Some(version); + } + } + } +} + +fn main_menu_loop(state: &State) -> Result<()> { + loop { + let menu_choice = get_main_menu_choice(state); + + match menu_choice { + MainMenuChoice::Quit => std::process::exit(0), + MainMenuChoice::KeepExisting => { + // Skip sync, just launch existing installation + break; + } + MainMenuChoice::ToggleBetas => { + // Toggle beta prerelease file + if state.prerelease_marker.exists() { + let _ = remove_file(&state.prerelease_marker); + println!("Beta releases disabled."); + } else { + write_file(&state.prerelease_marker, "")?; + println!("Beta releases enabled."); + } + println!(); + continue; + } + MainMenuChoice::ToggleCache => { + // Toggle cache disable file + if state.no_cache_marker.exists() { + let _ = remove_file(&state.no_cache_marker); + println!("Download caching enabled."); + } else { + write_file(&state.no_cache_marker, "")?; + // Delete the cache directory and everything in it + if state.uv_cache_dir.exists() { + let _ = anki_io::remove_dir_all(&state.uv_cache_dir); + } + println!("Download caching disabled and cache cleared."); + } + println!(); + continue; + } + MainMenuChoice::Uninstall => { + if handle_uninstall(state)?
{ + std::process::exit(0); + } + continue; + } + choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => { + // For other choices, update project files and sync + update_pyproject_for_version( + choice.clone(), + state.dist_pyproject_path.clone(), + state.user_pyproject_path.clone(), + state.dist_python_version_path.clone(), + state.user_python_version_path.clone(), + )?; + + // Extract current version before syncing (but don't write to file yet) + let previous_version_to_save = + extract_aqt_version(&state.uv_path, &state.uv_install_root); + + // Remove sync marker before attempting sync + let _ = remove_file(&state.sync_complete_marker); + + println!("\x1B[1mUpdating Anki...\x1B[0m\n"); + + let python_version_trimmed = if state.user_python_version_path.exists() { + let python_version = read_file(&state.user_python_version_path)?; + let python_version_str = String::from_utf8(python_version) + .context("Invalid UTF-8 in .python-version")?; + Some(python_version_str.trim().to_string()) + } else { + None + }; + + // `uv sync` does not pull in Python automatically, unlike `uv run`. + // This might be system/platform specific and/or a uv bug. + let mut command = Command::new(&state.uv_path); + command + .current_dir(&state.uv_install_root) + .env("UV_CACHE_DIR", &state.uv_cache_dir) + .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir) + .args(["python", "install", "--managed-python"]); + + // Add python version if .python-version file exists + if let Some(version) = &python_version_trimmed { + command.args([version]); + } + + if let Err(e) = command.ensure_success() { + println!("Python install failed: {e:#}"); + println!(); + continue; + } + + // Sync the venv + let mut command = Command::new(&state.uv_path); + command + .current_dir(&state.uv_install_root) + .env("UV_CACHE_DIR", &state.uv_cache_dir) + .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir) + .args(["sync", "--upgrade", "--managed-python"]); + + // Add python version if .python-version file exists + if let Some(version) = &python_version_trimmed { + command.args(["--python", version]); + } + + // Set UV_PRERELEASE=allow if beta mode is enabled + if state.prerelease_marker.exists() { + command.env("UV_PRERELEASE", "allow"); + } + + if state.no_cache_marker.exists() { + command.env("UV_NO_CACHE", "1"); + } + + match command.ensure_success() { + Ok(_) => { + // Sync succeeded + if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) { + inject_helper_addon(&state.uv_install_root)?; + } + + // Now that sync succeeded, save the previous version + if let Some(current_version) = previous_version_to_save { + let previous_version_path = + state.uv_install_root.join("previous-version"); + if let Err(e) = write_file(&previous_version_path, &current_version) { + println!("Warning: Could not save previous version: {e}"); + } + } + + break; + } + Err(e) => { + // If sync fails due to things like a missing wheel on pypi, + // we need to remove the lockfile or uv will cache the bad result. + let _ = remove_file(&state.uv_lock_path); + println!("Install failed: {e:#}"); + println!(); + continue; + } + } + } + } + } + Ok(()) +} + +fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> { + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .context("Failed to get system time")?
+ .as_secs(); + write_file(sync_complete_marker, timestamp.to_string())?; + Ok(()) +} + +fn get_main_menu_choice(state: &State) -> MainMenuChoice { + loop { + println!("1) Latest Anki (just press enter)"); + println!("2) Choose a version"); + if let Some(current_version) = &state.current_version { + println!("3) Keep existing version ({current_version})"); + } + if let Some(prev_version) = &state.previous_version { + if state.current_version.as_ref() != Some(prev_version) { + println!("4) Revert to previous version ({prev_version})"); + } + } + println!(); + + let betas_enabled = state.prerelease_marker.exists(); + println!( + "5) Allow betas: {}", + if betas_enabled { "on" } else { "off" } + ); + let cache_enabled = !state.no_cache_marker.exists(); + println!( + "6) Cache downloads: {}", + if cache_enabled { "on" } else { "off" } + ); + println!(); + println!("7) Uninstall"); + println!("8) Quit"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim(); + + println!(); + + return match input { + "" | "1" => MainMenuChoice::Latest, + "2" => MainMenuChoice::Version(get_version_kind()), + "3" => { + if state.current_version.is_some() { + MainMenuChoice::KeepExisting + } else { + println!("Invalid input. Please try again.\n"); + continue; + } + } + "4" => { + if let Some(prev_version) = &state.previous_version { + if state.current_version.as_ref() != Some(prev_version) { + if let Some(version_kind) = parse_version_kind(prev_version) { + return MainMenuChoice::Version(version_kind); + } + } + } + println!("Invalid input. Please try again.\n"); + continue; + } + "5" => MainMenuChoice::ToggleBetas, + "6" => MainMenuChoice::ToggleCache, + "7" => MainMenuChoice::Uninstall, + "8" => MainMenuChoice::Quit, + _ => { + println!("Invalid input. Please try again."); + continue; + } + }; + } +} + +fn get_version_kind() -> VersionKind { + loop { + println!("Enter the version you want to install:"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim(); + + if input.is_empty() { + println!("Please enter a version."); + continue; + } + + match parse_version_kind(input) { + Some(version_kind) => { + println!(); + return version_kind; + } + None => { + println!("Invalid version format. 
Please enter a version like 25.07.1 or 24.11 (minimum 2.1.50)"); + continue; + } + } + } +} + +fn update_pyproject_for_version( + menu_choice: MainMenuChoice, + dist_pyproject_path: std::path::PathBuf, + user_pyproject_path: std::path::PathBuf, + dist_python_version_path: std::path::PathBuf, + user_python_version_path: std::path::PathBuf, +) -> Result<()> { + match menu_choice { + MainMenuChoice::Latest => { + let content = read_file(&dist_pyproject_path)?; + write_file(&user_pyproject_path, &content)?; + let python_version_content = read_file(&dist_python_version_path)?; + write_file(&user_python_version_path, &python_version_content)?; + } + MainMenuChoice::KeepExisting => { + // Do nothing - keep existing pyproject.toml and .python-version + } + MainMenuChoice::ToggleBetas => { + unreachable!(); + } + MainMenuChoice::ToggleCache => { + unreachable!(); + } + MainMenuChoice::Uninstall => { + unreachable!(); + } + MainMenuChoice::Version(version_kind) => { + let content = read_file(&dist_pyproject_path)?; + let content_str = + String::from_utf8(content).context("Invalid UTF-8 in pyproject.toml")?; + let updated_content = match &version_kind { + VersionKind::PyOxidizer(version) => { + // Replace package name and add PyQt6 dependencies + content_str.replace( + "anki-release", + &format!( + concat!( + "aqt[qt6]=={}\",\n", + " \"anki-audio==0.1.0; sys.platform == 'win32' or sys.platform == 'darwin'\",\n", + " \"pyqt6==6.6.1\",\n", + " \"pyqt6-qt6==6.6.2\",\n", + " \"pyqt6-webengine==6.6.0\",\n", + " \"pyqt6-webengine-qt6==6.6.2\",\n", + " \"pyqt6_sip==13.6.0" + ), + version + ), + ) + } + VersionKind::Uv(version) => { + content_str.replace("anki-release", &format!("anki-release=={version}")) + } + }; + write_file(&user_pyproject_path, &updated_content)?; + + // Update .python-version based on version kind + match &version_kind { + VersionKind::PyOxidizer(_) => { + write_file(&user_python_version_path, "3.9")?; + } + VersionKind::Uv(_) => { + copy_file(&dist_python_version_path, &user_python_version_path)?; + } + } + } + MainMenuChoice::Quit => { + std::process::exit(0); + } + } + Ok(()) +} + +fn parse_version_kind(version: &str) -> Option<VersionKind> { + let numeric_chars: String = version + .chars() + .filter(|c| c.is_ascii_digit() || *c == '.') + .collect(); + + let parts: Vec<&str> = numeric_chars.split('.').collect(); + + if parts.len() < 2 { + return None; + } + + let major: u32 = match parts[0].parse() { + Ok(val) => val, + Err(_) => return None, + }; + + let minor: u32 = match parts[1].parse() { + Ok(val) => val, + Err(_) => return None, + }; + + let patch: u32 = if parts.len() >= 3 { + match parts[2].parse() { + Ok(val) => val, + Err(_) => return None, + } + } else { + 0 // Default patch to 0 if not provided + }; + + // Reject versions < 2.1.50 + if major == 2 && (minor != 1 || patch < 50) { + return None; + } + + if major < 25 || (major == 25 && minor < 6) { + Some(VersionKind::PyOxidizer(version.to_string())) + } else { + Some(VersionKind::Uv(version.to_string())) + } +} + +fn inject_helper_addon(_uv_install_root: &std::path::Path) -> Result<()> { + let addons21_path = get_anki_addons21_path()?; + + if !addons21_path.exists() { + return Ok(()); + } + + let addon_folder = addons21_path.join("anki-launcher"); + + // Remove existing anki-launcher folder if it exists + if addon_folder.exists() { + anki_io::remove_dir_all(&addon_folder)?; + } + + // Create the anki-launcher folder + create_dir_all(&addon_folder)?; + + // Write the embedded files + let init_py_content =
include_str!("../addon/__init__.py"); + let manifest_json_content = include_str!("../addon/manifest.json"); + + write_file(addon_folder.join("__init__.py"), init_py_content)?; + write_file(addon_folder.join("manifest.json"), manifest_json_content)?; + + Ok(()) +} + +fn get_anki_base_path() -> Result<std::path::PathBuf> { + let anki_base_path = if cfg!(target_os = "windows") { + // Windows: %APPDATA%\Anki2 + dirs::config_dir() + .context("Unable to determine config directory")? + .join("Anki2") + } else if cfg!(target_os = "macos") { + // macOS: ~/Library/Application Support/Anki2 + dirs::data_dir() + .context("Unable to determine data directory")? + .join("Anki2") + } else { + // Linux: ~/.local/share/Anki2 + dirs::data_dir() + .context("Unable to determine data directory")? + .join("Anki2") + }; + + Ok(anki_base_path) +} + +fn get_anki_addons21_path() -> Result<std::path::PathBuf> { + Ok(get_anki_base_path()?.join("addons21")) +} + +fn handle_uninstall(state: &State) -> Result<bool> { + println!("Uninstall Anki's program files? (y/n)"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim().to_lowercase(); + + if input != "y" { + println!("Uninstall cancelled."); + println!(); + return Ok(false); + } + + // Remove program files + if state.uv_install_root.exists() { + anki_io::remove_dir_all(&state.uv_install_root)?; + println!("Program files removed."); + } + + println!(); + println!("Remove all profiles/cards? (y/n)"); + print!("> "); + let _ = stdout().flush(); + + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + let input = input.trim().to_lowercase(); + + if input == "y" && state.anki_base_folder.exists() { + anki_io::remove_dir_all(&state.anki_base_folder)?; + println!("User data removed."); + } + + println!(); + + // Platform-specific messages + #[cfg(target_os = "macos")] + platform::mac::finalize_uninstall(); + + #[cfg(target_os = "windows")] + platform::windows::finalize_uninstall(); + + #[cfg(all(unix, not(target_os = "macos")))] + platform::unix::finalize_uninstall(); + + Ok(true) +} + +fn build_python_command(state: &State, args: &[String]) -> Result<Command> { + let python_exe = if cfg!(target_os = "windows") { + let show_console = std::env::var("ANKI_CONSOLE").is_ok(); + if show_console { + state.uv_install_root.join(".venv/Scripts/python.exe") + } else { + state.uv_install_root.join(".venv/Scripts/pythonw.exe") + } + } else { + state.uv_install_root.join(".venv/bin/python") + }; + + let mut cmd = Command::new(&python_exe); + cmd.args(["-c", "import aqt, sys; sys.argv[0] = 'Anki'; aqt.run()"]); + cmd.args(args); + // tell the Python code it was invoked by the launcher, and updating is + // available + cmd.env("ANKI_LAUNCHER", std::env::current_exe()?.utf8()?.as_str()); + + // Set UV and Python paths for the Python code + let (exe_dir, _) = get_exe_and_resources_dirs()?; + let uv_path = exe_dir.join(get_uv_binary_name()); + cmd.env("ANKI_LAUNCHER_UV", uv_path.utf8()?.as_str()); + cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str()); + + // Set UV_PRERELEASE=allow if beta mode is enabled + if state.prerelease_marker.exists() { + cmd.env("UV_PRERELEASE", "allow"); + } + + Ok(cmd) +} diff --git a/qt/launcher/src/platform/mac.rs b/qt/launcher/src/platform/mac.rs new file mode 100644 index 000000000..f97d7fd07 --- /dev/null +++ b/qt/launcher/src/platform/mac.rs @@ -0,0 +1,98 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +
+use std::io; +use std::io::Write; +use std::path::Path; +use std::process::Command; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering; +use std::sync::Arc; +use std::thread; +use std::time::Duration; + +use anki_process::CommandExt as AnkiCommandExt; +use anyhow::Context; +use anyhow::Result; + +pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<()> { + // Pre-validate by running --version to trigger any Gatekeeper checks + print!("\n\x1B[1mThis may take a few minutes. Please wait\x1B[0m"); + io::stdout().flush().unwrap(); + + // Start progress indicator + let running = Arc::new(AtomicBool::new(true)); + let running_clone = running.clone(); + let progress_thread = thread::spawn(move || { + while running_clone.load(Ordering::Relaxed) { + print!("."); + io::stdout().flush().unwrap(); + thread::sleep(Duration::from_secs(1)); + } + }); + + let _ = cmd + .env("ANKI_FIRST_RUN", "1") + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .ensure_success(); + + if cfg!(target_os = "macos") { + // older Anki versions had a short mpv timeout and didn't support + // ANKI_FIRST_RUN, so we need to ensure mpv passes Gatekeeper + // validation prior to launch + let mpv_path = root.join(".venv/lib/python3.9/site-packages/anki_audio/mpv"); + if mpv_path.exists() { + let _ = Command::new(&mpv_path) + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .ensure_success(); + } + } + + // Stop progress indicator + running.store(false, Ordering::Relaxed); + progress_thread.join().unwrap(); + println!(); // New line after dots + Ok(()) +} + +pub fn relaunch_in_terminal() -> Result<()> { + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + Command::new("open") + .args(["-a", "Terminal"]) + .arg(current_exe) + .ensure_spawn()?; + std::process::exit(0); +} + +pub fn finalize_uninstall() { + if let Ok(exe_path) = std::env::current_exe() { + // Find the .app bundle by walking up the directory tree + let mut app_bundle_path = exe_path.as_path(); + while let Some(parent) = app_bundle_path.parent() { + if let Some(name) = parent.file_name() { + if name.to_string_lossy().ends_with(".app") { + let result = Command::new("trash").arg(parent).output(); + + match result { + Ok(output) if output.status.success() => { + println!("Anki has been uninstalled."); + return; + } + _ => { + // Fall back to manual instructions + println!( + "Please manually drag Anki.app to the trash to complete uninstall." + ); + } + } + return; + } + } + app_bundle_path = parent; + } + } +} diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs new file mode 100644 index 000000000..50a303656 --- /dev/null +++ b/qt/launcher/src/platform/mod.rs @@ -0,0 +1,137 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#[cfg(all(unix, not(target_os = "macos")))] +pub mod unix; + +#[cfg(target_os = "macos")] +pub mod mac; + +#[cfg(target_os = "windows")] +pub mod windows; + +use std::path::PathBuf; + +use anki_process::CommandExt; +use anyhow::Context; +use anyhow::Result; + +pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { + let exe_dir = std::env::current_exe() + .context("Failed to get current executable path")? + .parent() + .context("Failed to get executable directory")? 
+ .to_owned(); + + let resources_dir = if cfg!(target_os = "macos") { + // On macOS, resources are in ../Resources relative to the executable + exe_dir + .parent() + .context("Failed to get parent directory")? + .join("Resources") + } else { + // On other platforms, resources are in the same directory as executable + exe_dir.clone() + }; + + Ok((exe_dir, resources_dir)) +} + +pub fn get_uv_binary_name() -> &'static str { + if cfg!(target_os = "windows") { + "uv.exe" + } else if cfg!(target_os = "macos") { + "uv" + } else if cfg!(target_arch = "x86_64") { + "uv.amd64" + } else { + "uv.arm64" + } +} + +pub fn respawn_launcher() -> Result<()> { + use std::process::Stdio; + + let mut launcher_cmd = if cfg!(target_os = "macos") { + // On macOS, we need to launch the .app bundle, not the executable directly + let current_exe = + std::env::current_exe().context("Failed to get current executable path")?; + + // Navigate from Contents/MacOS/launcher to the .app bundle + let app_bundle = current_exe + .parent() // MacOS + .and_then(|p| p.parent()) // Contents + .and_then(|p| p.parent()) // .app + .context("Failed to find .app bundle")?; + + let mut cmd = std::process::Command::new("open"); + cmd.arg(app_bundle); + cmd + } else { + let current_exe = + std::env::current_exe().context("Failed to get current executable path")?; + std::process::Command::new(current_exe) + }; + + launcher_cmd + .stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()); + + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; + const DETACHED_PROCESS: u32 = 0x00000008; + launcher_cmd.creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS); + } + + #[cfg(all(unix, not(target_os = "macos")))] + { + use std::os::unix::process::CommandExt; + launcher_cmd.process_group(0); + } + + let child = launcher_cmd.ensure_spawn()?; + std::mem::forget(child); + + Ok(()) +} + +pub fn launch_anki_normally(mut cmd: std::process::Command) -> Result<()> { + #[cfg(windows)] + { + crate::platform::windows::prepare_to_launch_normally(); + cmd.ensure_success()?; + } + #[cfg(unix)] + cmd.ensure_exec()?; + Ok(()) +} + +#[cfg(windows)] +pub use windows::ensure_terminal_shown; + +#[cfg(unix)] +pub fn ensure_terminal_shown() -> Result<()> { + use std::io::IsTerminal; + + let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); + if !stdout_is_terminal { + #[cfg(target_os = "macos")] + mac::relaunch_in_terminal()?; + #[cfg(not(target_os = "macos"))] + unix::relaunch_in_terminal()?; + } + + // Set terminal title to "Anki Launcher" + print!("\x1b]2;Anki Launcher\x07"); + Ok(()) +} + +pub fn ensure_os_supported() -> Result<()> { + #[cfg(all(unix, not(target_os = "macos")))] + unix::ensure_glibc_supported()?; + + Ok(()) +} diff --git a/qt/launcher/src/platform/unix.rs b/qt/launcher/src/platform/unix.rs new file mode 100644 index 000000000..0df33838f --- /dev/null +++ b/qt/launcher/src/platform/unix.rs @@ -0,0 +1,98 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::process::Command; + +use anyhow::Context; +use anyhow::Result; + +pub fn relaunch_in_terminal() -> Result<()> { + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + + // Try terminals in order of preference + let terminals = [ + ("x-terminal-emulator", vec!["-e"]), + ("gnome-terminal", vec!["--"]), + ("konsole", vec!["-e"]), + ("xfce4-terminal", vec!["-e"]), + 
("alacritty", vec!["-e"]), + ("kitty", vec![]), + ("foot", vec![]), + ("urxvt", vec!["-e"]), + ("xterm", vec!["-e"]), + ]; + + for (terminal_cmd, args) in &terminals { + // Check if terminal exists + if Command::new("which") + .arg(terminal_cmd) + .output() + .map(|o| o.status.success()) + .unwrap_or(false) + { + // Try to launch the terminal + let mut cmd = Command::new(terminal_cmd); + if args.is_empty() { + cmd.arg(¤t_exe); + } else { + cmd.args(args).arg(¤t_exe); + } + + if cmd.spawn().is_ok() { + std::process::exit(0); + } + } + } + + // If no terminal worked, continue without relaunching + Ok(()) +} + +pub fn finalize_uninstall() { + use std::io::stdin; + use std::io::stdout; + use std::io::Write; + + let uninstall_script = std::path::Path::new("/usr/local/share/anki/uninstall.sh"); + + if uninstall_script.exists() { + println!("To finish uninstalling, run 'sudo /usr/local/share/anki/uninstall.sh'"); + } else { + println!("Anki has been uninstalled."); + } + println!("Press enter to quit."); + let _ = stdout().flush(); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); +} + +pub fn ensure_glibc_supported() -> Result<()> { + use std::ffi::CStr; + let get_glibc_version = || -> Option<(u32, u32)> { + let version_ptr = unsafe { libc::gnu_get_libc_version() }; + if version_ptr.is_null() { + return None; + } + + let version_cstr = unsafe { CStr::from_ptr(version_ptr) }; + let version_str = version_cstr.to_str().ok()?; + + // Parse version string (format: "2.36" or "2.36.1") + let version_parts: Vec<&str> = version_str.split('.').collect(); + if version_parts.len() < 2 { + return None; + } + + let major: u32 = version_parts[0].parse().ok()?; + let minor: u32 = version_parts[1].parse().ok()?; + + Some((major, minor)) + }; + + let (major, minor) = get_glibc_version().unwrap_or_default(); + if major < 2 || (major == 2 && minor < 36) { + anyhow::bail!("Anki requires a modern Linux distro with glibc 2.36 or later."); + } + + Ok(()) +} diff --git a/qt/launcher/src/platform/windows.rs b/qt/launcher/src/platform/windows.rs new file mode 100644 index 000000000..3c060a9de --- /dev/null +++ b/qt/launcher/src/platform/windows.rs @@ -0,0 +1,217 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +use std::io::stdin; +use std::process::Command; + +use anyhow::Context; +use anyhow::Result; +use widestring::u16cstr; +use windows::core::PCWSTR; +use windows::Win32::System::Console::AttachConsole; +use windows::Win32::System::Console::GetConsoleWindow; +use windows::Win32::System::Console::ATTACH_PARENT_PROCESS; +use windows::Win32::System::Registry::RegCloseKey; +use windows::Win32::System::Registry::RegOpenKeyExW; +use windows::Win32::System::Registry::RegQueryValueExW; +use windows::Win32::System::Registry::HKEY; +use windows::Win32::System::Registry::HKEY_CURRENT_USER; +use windows::Win32::System::Registry::KEY_READ; +use windows::Win32::System::Registry::REG_SZ; +use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID; + +pub fn ensure_terminal_shown() -> Result<()> { + unsafe { + if !GetConsoleWindow().is_invalid() { + // We already have a console, no need to spawn anki-console.exe + return Ok(()); + } + } + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() { + // Successfully attached to parent console + reconnect_stdio_to_console(); + return Ok(()); + } + + // No console available, spawn anki-console.exe and exit + let current_exe = 
std::env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let console_exe = exe_dir.join("anki-console.exe"); + + if !console_exe.exists() { + anyhow::bail!("anki-console.exe not found in the same directory"); + } + + // Spawn anki-console.exe without waiting + Command::new(&console_exe) + .env("ANKI_IMPLICIT_CONSOLE", "1") + .spawn() + .context("Failed to spawn anki-console.exe")?; + + // Exit immediately after spawning + std::process::exit(0); +} + +pub fn attach_to_parent_console() -> bool { + unsafe { + if !GetConsoleWindow().is_invalid() { + // we have a console already + return false; + } + + if AttachConsole(ATTACH_PARENT_PROCESS).is_ok() { + // successfully attached to parent + reconnect_stdio_to_console(); + true + } else { + false + } + } +} + +/// Reconnect stdin/stdout/stderr to the console. +fn reconnect_stdio_to_console() { + use std::ffi::CString; + + use libc_stdhandle::*; + + // we launched without a console, so we'll need to open stdin/out/err + let conin = CString::new("CONIN$").unwrap(); + let conout = CString::new("CONOUT$").unwrap(); + let r = CString::new("r").unwrap(); + let w = CString::new("w").unwrap(); + + // Python uses the CRT for I/O, and it requires the descriptors are reopened. + unsafe { + libc::freopen(conin.as_ptr(), r.as_ptr(), stdin()); + libc::freopen(conout.as_ptr(), w.as_ptr(), stdout()); + libc::freopen(conout.as_ptr(), w.as_ptr(), stderr()); + } +} + +pub fn finalize_uninstall() { + let uninstaller_path = get_uninstaller_path(); + + match uninstaller_path { + Some(path) => { + println!("Launching Windows uninstaller..."); + let result = Command::new(&path).env("ANKI_LAUNCHER", "1").spawn(); + + match result { + Ok(_) => { + println!("Uninstaller launched successfully."); + return; + } + Err(e) => { + println!("Failed to launch uninstaller: {e}"); + println!("You can manually run: {}", path.display()); + } + } + } + None => { + println!("Windows uninstaller not found."); + println!("You may need to uninstall via Windows Settings > Apps."); + } + } + println!("Press enter to close..."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); +} + +fn get_uninstaller_path() -> Option { + // Try to read install directory from registry + if let Some(install_dir) = read_registry_install_dir() { + let uninstaller = install_dir.join("uninstall.exe"); + if uninstaller.exists() { + return Some(uninstaller); + } + } + + // Fall back to default location + let default_dir = dirs::data_local_dir()?.join("Programs").join("Anki"); + let uninstaller = default_dir.join("uninstall.exe"); + if uninstaller.exists() { + return Some(uninstaller); + } + + None +} + +fn read_registry_install_dir() -> Option { + unsafe { + let mut hkey = HKEY::default(); + + // Convert the registry path to wide string + let subkey = u16cstr!("SOFTWARE\\Anki"); + + // Open the registry key + let result = RegOpenKeyExW( + HKEY_CURRENT_USER, + PCWSTR(subkey.as_ptr()), + Some(0), + KEY_READ, + &mut hkey, + ); + + if result.is_err() { + return None; + } + + // Query the Install_Dir64 value + let value_name = u16cstr!("Install_Dir64"); + + let mut value_type = REG_SZ; + let mut data_size = 0u32; + + // First call to get the size + let result = RegQueryValueExW( + hkey, + PCWSTR(value_name.as_ptr()), + None, + Some(&mut value_type), + None, + Some(&mut data_size), + ); + + if result.is_err() || data_size == 0 { + let _ = RegCloseKey(hkey); + return None; + } + + // 
Allocate buffer and read the value + let mut buffer: Vec = vec![0; (data_size / 2) as usize]; + let result = RegQueryValueExW( + hkey, + PCWSTR(value_name.as_ptr()), + None, + Some(&mut value_type), + Some(buffer.as_mut_ptr() as *mut u8), + Some(&mut data_size), + ); + + let _ = RegCloseKey(hkey); + + if result.is_ok() { + // Convert wide string back to PathBuf + let len = buffer.iter().position(|&x| x == 0).unwrap_or(buffer.len()); + let path_str = String::from_utf16_lossy(&buffer[..len]); + Some(std::path::PathBuf::from(path_str)) + } else { + None + } + } +} + +pub fn prepare_to_launch_normally() { + // Set the App User Model ID for Windows taskbar grouping + unsafe { + let _ = + SetCurrentProcessExplicitAppUserModelID(PCWSTR(u16cstr!("Ankitects.Anki").as_ptr())); + } + + attach_to_parent_console(); +} diff --git a/qt/bundle/win/anki-icon.ico b/qt/launcher/win/anki-icon.ico similarity index 100% rename from qt/bundle/win/anki-icon.ico rename to qt/launcher/win/anki-icon.ico diff --git a/qt/bundle/win/anki-manifest.rc b/qt/launcher/win/anki-manifest.rc similarity index 100% rename from qt/bundle/win/anki-manifest.rc rename to qt/launcher/win/anki-manifest.rc diff --git a/qt/launcher/win/anki.exe.manifest b/qt/launcher/win/anki.exe.manifest new file mode 100644 index 000000000..93e98c6ec --- /dev/null +++ b/qt/launcher/win/anki.exe.manifest @@ -0,0 +1,25 @@ + + + + + + + + + + + + + true + PerMonitorV2 + true + UTF-8 + + + + + + + + + diff --git a/qt/bundle/win/anki.template.nsi b/qt/launcher/win/anki.template.nsi similarity index 85% rename from qt/bundle/win/anki.template.nsi rename to qt/launcher/win/anki.template.nsi index 513c81bfb..84dedf9c8 100644 --- a/qt/bundle/win/anki.template.nsi +++ b/qt/launcher/win/anki.template.nsi @@ -23,8 +23,8 @@ Name "Anki" Unicode true -; The file to write (make relative to repo root instead of out/bundle) -OutFile "..\..\@@INSTALLER@@" +; The file to write (relative to nsis directory) +OutFile "..\launcher_exe\anki-install.exe" ; Non elevated RequestExecutionLevel user @@ -62,7 +62,7 @@ Function .onInit FunctionEnd !ifdef WRITE_UNINSTALLER -!uninstfinalize 'copy "%1" "std\uninstall.exe"' +!uninstfinalize 'copy "%1" "uninstall.exe"' !endif ;-------------------------------- @@ -191,7 +191,7 @@ Section "" ; Add files to installer !ifndef WRITE_UNINSTALLER - File /r ..\..\@@SRC@@\*.* + File /r ..\launcher\*.* !endif !insertmacro APP_ASSOCIATE_HKCU "apkg" "anki.apkg" \ @@ -213,8 +213,8 @@ Section "" WriteRegStr HKCU Software\Anki "Install_Dir64" "$INSTDIR" ; Write the uninstall keys for Windows - WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki" - WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "@@VERSION@@" + WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki Launcher" + WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "1.0.0" WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"' WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S' WriteRegDWORD HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1 @@ -224,15 +224,44 @@ Section "" WriteUninstaller "uninstall.exe" !endif + ; Ensure uv gets re-run + Push "$INSTDIR\pyproject.toml" + Call TouchFile + + ; Launch Anki after installation + Exec "$INSTDIR\anki.exe" + Quit 
+ SectionEnd ; end the section ;-------------------------------- +; Touch file function to update mtime using copy trick +Function TouchFile + Exch $R0 ; file path + + nsExec::Exec 'cmd /c "copy /B "$R0" +,,"' + + Pop $R0 +FunctionEnd + +;-------------------------------- + ; Uninstaller function un.onInit - MessageBox MB_OKCANCEL "This will remove Anki's program files, but will not delete your card data. If you wish to delete your card data as well, you can do so via File>Switch Profile inside Anki first. Are you sure you wish to uninstall Anki?" /SD IDOK IDOK next - Quit + ; Check for ANKI_LAUNCHER environment variable + ReadEnvStr $R0 "ANKI_LAUNCHER" + ${If} $R0 != "" + ; Wait for launcher to exit + Sleep 2000 + Goto next + ${Else} + ; Try to launch anki.exe with ANKI_LAUNCHER_UNINSTALL=1 + IfFileExists "$INSTDIR\anki.exe" 0 next + nsExec::Exec 'cmd /c "set ANKI_LAUNCHER_UNINSTALL=1 && start /b "" "$INSTDIR\anki.exe""' + Quit + ${EndIf} next: functionEnd @@ -252,9 +281,15 @@ Section "Uninstall" !insertmacro APP_UNASSOCIATE_HKCU "ankiaddon" "anki.ankiaddon" !insertmacro UPDATEFILEASSOC + ; Schedule uninstaller for deletion on reboot + Delete /REBOOTOK "$INSTDIR\uninstall.exe" + ; try to remove top level folder if empty RMDir "$INSTDIR" + ; Remove AnkiProgramData folder created during runtime + RMDir /r "$LOCALAPPDATA\AnkiProgramFiles" + ; Remove registry keys DeleteRegKey HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" DeleteRegKey HKCU Software\Anki diff --git a/qt/launcher/win/build.bat b/qt/launcher/win/build.bat new file mode 100644 index 000000000..da574f210 --- /dev/null +++ b/qt/launcher/win/build.bat @@ -0,0 +1,10 @@ +@echo off + +if "%NOCOMP%"=="1" ( + set NO_COMPRESS=1 + set CODESIGN=0 +) else ( + set CODESIGN=1 + set NO_COMPRESS=0 +) +cargo run --bin build_win diff --git a/qt/bundle/win/fileassoc.nsh b/qt/launcher/win/fileassoc.nsh similarity index 100% rename from qt/bundle/win/fileassoc.nsh rename to qt/launcher/win/fileassoc.nsh diff --git a/qt/mac/anki_mac_helper/__init__.py b/qt/mac/anki_mac_helper/__init__.py new file mode 100644 index 000000000..a0adb469e --- /dev/null +++ b/qt/mac/anki_mac_helper/__init__.py @@ -0,0 +1,51 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +from __future__ import annotations + +import sys +from collections.abc import Callable +from ctypes import CDLL, CFUNCTYPE, c_bool, c_char_p +from pathlib import Path + + +class _MacOSHelper: + def __init__(self) -> None: + # Look for the dylib in the same directory as this module + module_dir = Path(__file__).parent + path = module_dir / "libankihelper.dylib" + + self._dll = CDLL(str(path)) + self._dll.system_is_dark.restype = c_bool + + def system_is_dark(self) -> bool: + return self._dll.system_is_dark() + + def set_darkmode_enabled(self, enabled: bool) -> bool: + return self._dll.set_darkmode_enabled(enabled) + + def start_wav_record(self, path: str, on_error: Callable[[str], None]) -> None: + global _on_audio_error + _on_audio_error = on_error + self._dll.start_wav_record(path.encode("utf8"), _audio_error_callback) + + def end_wav_record(self) -> None: + "On completion, file should be saved if no error has arrived." 
+ self._dll.end_wav_record() + + +# this must not be overwritten or deallocated +@CFUNCTYPE(None, c_char_p) # type: ignore +def _audio_error_callback(msg: str) -> None: + if handler := _on_audio_error: + handler(msg) + + +_on_audio_error: Callable[[str], None] | None = None + +macos_helper: _MacOSHelper | None = None +if sys.platform == "darwin": + try: + macos_helper = _MacOSHelper() + except Exception as e: + print("macos_helper:", e) diff --git a/qt/mac/anki_mac_helper/py.typed b/qt/mac/anki_mac_helper/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/qt/mac/ankihelper.xcodeproj/project.pbxproj b/qt/mac/ankihelper.xcodeproj/project.pbxproj index 8232fba8e..016b03192 100644 --- a/qt/mac/ankihelper.xcodeproj/project.pbxproj +++ b/qt/mac/ankihelper.xcodeproj/project.pbxproj @@ -172,7 +172,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 11.6; + MACOSX_DEPLOYMENT_TARGET = 11; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; @@ -225,7 +225,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 11.6; + MACOSX_DEPLOYMENT_TARGET = 11; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; SDKROOT = macosx; diff --git a/qt/mac/build.sh b/qt/mac/build.sh new file mode 100755 index 000000000..4c14a13f4 --- /dev/null +++ b/qt/mac/build.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +set -e + +# Get the project root directory +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJ_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +# Build the dylib first +echo "Building macOS helper dylib..." +"$PROJ_ROOT/out/pyenv/bin/python" "$SCRIPT_DIR/helper_build.py" + +# Create the wheel using uv +echo "Creating wheel..." +cd "$SCRIPT_DIR" +"$PROJ_ROOT/out/extracted/uv/uv" build --wheel + +echo "Build complete!" 
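
The module-level `@CFUNCTYPE` wrapper in `anki_mac_helper/__init__.py` above is the detail worth noting: a ctypes callback is only safe to hand to native code while a Python reference to it remains alive, which is why it is created once at import time and never reassigned. Below is a minimal sketch of that lifetime rule, with a hypothetical `libexample.dylib` and `register_callback()` standing in for the real library (illustration only, not part of the patch):

```python
# Illustrative sketch of the ctypes callback-lifetime rule used above.
# "libexample.dylib" and register_callback() are hypothetical stand-ins.
from __future__ import annotations

from ctypes import CDLL, CFUNCTYPE, c_char_p

_lib = CDLL("libexample.dylib")


@CFUNCTYPE(None, c_char_p)  # C signature: void (*)(const char *)
def _on_message(msg: bytes | None) -> None:
    # ctypes delivers c_char_p arguments to Python callbacks as bytes (or None)
    if msg is not None:
        print("native code says:", msg.decode("utf8"))


# Registering a module-level wrapper keeps it referenced for the life of the
# process; if the wrapper were created inside a function and then dropped, the
# native side would later call through a dangling function pointer.
_lib.register_callback(_on_message)
```

In the patch itself, the decorated `_audio_error_callback` plays that role, while `_on_audio_error` is held in a module global so the Python-side handler can be swapped without recreating the C wrapper.
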
diff --git a/qt/mac/helper_build.py b/qt/mac/helper_build.py index 4edbd05c1..aaf997669 100644 --- a/qt/mac/helper_build.py +++ b/qt/mac/helper_build.py @@ -7,23 +7,52 @@ import subprocess import sys from pathlib import Path -out_dylib, *src_files = sys.argv[1:] -out_dir = Path(out_dylib).parent.resolve() +# If no arguments provided, build for the anki_mac_helper package +if len(sys.argv) == 1: + script_dir = Path(__file__).parent + out_dylib = script_dir / "anki_mac_helper" / "libankihelper.dylib" + src_files = list(script_dir.glob("*.swift")) +else: + out_dylib, *src_files = sys.argv[1:] + +out_dylib = Path(out_dylib) +out_dir = out_dylib.parent.resolve() src_dir = Path(src_files[0]).parent.resolve() -if platform.machine() == "arm64" and not os.environ.get("MAC_X86"): - target = "arm64-apple-macos11" -else: - target = "x86_64-apple-macos10.14" +# Build for both architectures +architectures = ["arm64", "x86_64"] +temp_files = [] -args = [ - "swiftc", - "-target", - target, - "-emit-library", - "-module-name", - "ankihelper", - "-O", +for arch in architectures: + target = f"{arch}-apple-macos11" + temp_out = out_dir / f"temp_{arch}.dylib" + temp_files.append(temp_out) + + args = [ + "swiftc", + "-target", + target, + "-emit-library", + "-module-name", + "ankihelper", + "-O", + ] + if isinstance(src_files[0], Path): + args.extend(src_files) + else: + args.extend(src_dir / Path(file).name for file in src_files) + args.extend(["-o", str(temp_out)]) + subprocess.run(args, check=True, cwd=out_dir) + +# Ensure output directory exists +out_dir.mkdir(parents=True, exist_ok=True) + +# Create universal binary +lipo_args = ["lipo", "-create", "-output", str(out_dylib)] + [ + str(f) for f in temp_files ] -args.extend(src_dir / Path(file).name for file in src_files) -subprocess.run(args, check=True, cwd=out_dir) +subprocess.run(lipo_args, check=True) + +# Clean up temporary files +for temp_file in temp_files: + temp_file.unlink() diff --git a/qt/mac/pyproject.toml b/qt/mac/pyproject.toml new file mode 100644 index 000000000..93f4e939b --- /dev/null +++ b/qt/mac/pyproject.toml @@ -0,0 +1,17 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "anki-mac-helper" +version = "0.1.0" +description = "Small support library for Anki on Macs" +requires-python = ">=3.9" +license = { text = "AGPL-3.0-or-later" } +authors = [ + { name = "Anki Team" }, +] +urls = { Homepage = "https://github.com/ankitects/anki" } + +[tool.hatch.build.targets.wheel] +packages = ["anki_mac_helper"] diff --git a/qt/pyproject.toml b/qt/pyproject.toml new file mode 100644 index 000000000..5a237edb0 --- /dev/null +++ b/qt/pyproject.toml @@ -0,0 +1,87 @@ +[project] +name = "aqt" +dynamic = ["version"] +requires-python = ">=3.9" +license = "AGPL-3.0-or-later" +dependencies = [ + "beautifulsoup4", + "flask", + "flask_cors", + "jsonschema", + "requests", + "send2trash", + "waitress>=2.0.0", + "pywin32; sys.platform == 'win32'", + "anki-mac-helper; sys.platform == 'darwin'", + "pip-system-certs!=5.1", + "pyqt6>=6.2", + "pyqt6-webengine>=6.2", + # anki dependency is added dynamically in hatch_build.py with exact version +] + +[project.optional-dependencies] +audio = [ + "anki-audio==0.1.0; sys.platform == 'win32' or sys.platform == 'darwin'", +] +qt66 = [ + "pyqt6==6.6.1", + "pyqt6-qt6==6.6.2", + "pyqt6-webengine==6.6.0", + "pyqt6-webengine-qt6==6.6.2", + "pyqt6_sip==13.6.0", +] +qt67 = [ + "pyqt6==6.7.1", + "pyqt6-qt6==6.7.3", + "pyqt6-webengine==6.7.0", + "pyqt6-webengine-qt6==6.7.3", + 
"pyqt6_sip==13.10.2", +] +qt69 = [ + "pyqt6==6.9.1", + "pyqt6-qt6==6.9.1", + "pyqt6-webengine==6.9.0", + "pyqt6-webengine-qt6==6.9.1", + "pyqt6_sip==13.10.2", +] +qt = [ + "pyqt6==6.8.0", + "pyqt6-qt6==6.8.1", + "pyqt6-webengine==6.8.0", + "pyqt6-webengine-qt6==6.8.1", + "pyqt6_sip==13.10.2", +] + +[tool.uv] +conflicts = [ + [ + { extra = "qt" }, + { extra = "qt66" }, + { extra = "qt67" }, + { extra = "qt69" }, + ], +] + +[tool.uv.sources] +anki = { workspace = true } + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project.scripts] +anki = "aqt:run" + +[project.gui-scripts] +ankiw = "aqt:run" + +[tool.hatch.build.targets.wheel] +packages = ["aqt"] +exclude = ["**/*.scss", "**/*.ui"] + +[tool.hatch.version] +source = "code" +path = "../python/version.py" + +[tool.hatch.build.hooks.custom] +path = "hatch_build.py" diff --git a/qt/release/.gitignore b/qt/release/.gitignore new file mode 100644 index 000000000..b09017c0e --- /dev/null +++ b/qt/release/.gitignore @@ -0,0 +1,2 @@ +pyproject.toml +pyproject.toml.old \ No newline at end of file diff --git a/qt/release/build.sh b/qt/release/build.sh new file mode 100755 index 000000000..423638bc4 --- /dev/null +++ b/qt/release/build.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +set -e + +test -f build.sh || { + echo "run from release folder" + exit 1 +} + +# Get the project root (two levels up from qt/release) +PROJ_ROOT="$(cd "$(dirname "$0")/../.." && pwd)" + +# Use extracted uv binary +UV="$PROJ_ROOT/out/extracted/uv/uv" + +# Read version from .version file +VERSION=$(cat "$PROJ_ROOT/.version" | tr -d '[:space:]') + +# Copy existing pyproject.toml to .old if it exists +if [ -f pyproject.toml ]; then + cp pyproject.toml pyproject.toml.old +fi + +# Export dependencies using uv +echo "Exporting dependencies..." +rm -f pyproject.toml +DEPS=$(cd "$PROJ_ROOT" && "$UV" export --no-hashes --no-annotate --no-header --extra audio --extra qt --all-packages --no-dev --no-emit-workspace) + +# Generate the pyproject.toml file +cat > pyproject.toml << EOF +[project] +name = "anki-release" +version = "$VERSION" +description = "A package to lock Anki's dependencies" +requires-python = ">=3.9" +dependencies = [ + "anki==$VERSION", + "aqt==$VERSION", +EOF + +# Add the exported dependencies to the file +echo "$DEPS" | while IFS= read -r line; do + if [[ -n "$line" ]]; then + echo " \"$line\"," >> pyproject.toml + fi +done + +# Complete the pyproject.toml file +cat >> pyproject.toml << 'EOF' +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +# hatch throws an error if nothing is included +[tool.hatch.build.targets.wheel] +include = ["no-such-file"] +EOF + +echo "Generated pyproject.toml with version $VERSION" + +# Show diff if .old file exists +if [ -f pyproject.toml.old ]; then + echo + echo "Differences from previous release version:" + diff -u --color=always pyproject.toml.old pyproject.toml || true +fi + +echo "Building wheel..." +"$UV" build --wheel --out-dir "$PROJ_ROOT/out/wheels" diff --git a/qt/tools/build_ui.py b/qt/tools/build_ui.py index 776375598..b87031213 100644 --- a/qt/tools/build_ui.py +++ b/qt/tools/build_ui.py @@ -6,16 +6,10 @@ from __future__ import annotations import io import re import sys +from dataclasses import dataclass from pathlib import Path -try: - from PyQt6.uic import compileUi -except ImportError: - # ARM64 Linux builds may not have access to PyQt6, and may have aliased - # it to PyQt5. We allow fallback, but the _qt6.py files will not be valid. 
- from PyQt5.uic import compileUi # type: ignore - -from dataclasses import dataclass +from PyQt6.uic import compileUi def compile(ui_file: str | Path) -> str: @@ -53,21 +47,9 @@ def with_fixes_for_qt6(code: str) -> str: return "\n".join(outlines) -def with_fixes_for_qt5(code: str) -> str: - code = code.replace( - "from PyQt5 import QtCore, QtGui, QtWidgets", - "from PyQt5 import QtCore, QtGui, QtWidgets\nfrom aqt.utils import tr\n", - ) - code = code.replace("Qt6", "Qt5") - code = code.replace("QtGui.QAction", "QtWidgets.QAction") - code = code.replace("import icons_rc", "") - return code - - @dataclass class UiFileAndOutputs: ui_file: Path - qt5_file: str qt6_file: str @@ -82,7 +64,6 @@ def get_files() -> list[UiFileAndOutputs]: out.append( UiFileAndOutputs( ui_file=path, - qt5_file=outpath.replace(".ui", "_qt5.py"), qt6_file=outpath.replace(".ui", "_qt6.py"), ) ) @@ -93,8 +74,5 @@ if __name__ == "__main__": for entry in get_files(): stock = compile(entry.ui_file) for_qt6 = with_fixes_for_qt6(stock) - for_qt5 = with_fixes_for_qt5(for_qt6) - with open(entry.qt5_file, "w") as file: - file.write(for_qt5) with open(entry.qt6_file, "w") as file: file.write(for_qt6) diff --git a/rslib/Cargo.toml b/rslib/Cargo.toml index 1c4abf46f..a1d24cc87 100644 --- a/rslib/Cargo.toml +++ b/rslib/Cargo.toml @@ -48,6 +48,7 @@ async-trait.workspace = true axum.workspace = true axum-client-ip.workspace = true axum-extra.workspace = true +bitflags.workspace = true blake3.workspace = true bytes.workspace = true chrono.workspace = true @@ -106,6 +107,5 @@ unicode-normalization.workspace = true zip.workspace = true zstd.workspace = true -[target.'cfg(windows)'.dependencies.windows] -version = "0.56.0" -features = ["Media_SpeechSynthesis", "Foundation_Collections", "Storage_Streams"] +[target.'cfg(windows)'.dependencies] +windows.workspace = true diff --git a/rslib/i18n/check.rs b/rslib/i18n/check.rs index f168406d1..48521bbdd 100644 --- a/rslib/i18n/check.rs +++ b/rslib/i18n/check.rs @@ -21,14 +21,11 @@ pub fn check(lang_map: &TranslationsByLang) { fn check_content(lang: &str, fname: &str, content: &str) { let lang_id: LanguageIdentifier = "en-US".parse().unwrap(); let resource = FluentResource::try_new(content.into()).unwrap_or_else(|e| { - panic!("{}\nUnable to parse {}/{}: {:?}", content, lang, fname, e); + panic!("{content}\nUnable to parse {lang}/{fname}: {e:?}"); }); let mut bundle: FluentBundle = FluentBundle::new(vec![lang_id]); bundle.add_resource(resource).unwrap_or_else(|e| { - panic!( - "{}\nUnable to bundle - duplicate key? {}/{}: {:?}", - content, lang, fname, e - ); + panic!("{content}\nUnable to bundle - duplicate key? 
{lang}/{fname}: {e:?}"); }); } diff --git a/rslib/i18n/gather.rs b/rslib/i18n/gather.rs index a25e5813e..835e108af 100644 --- a/rslib/i18n/gather.rs +++ b/rslib/i18n/gather.rs @@ -48,8 +48,7 @@ fn add_folder(map: &mut TranslationsByLang, folder: &Path, lang: &str) { let text = fs::read_to_string(entry.path()).unwrap(); assert!( text.ends_with('\n'), - "file was missing final newline: {:?}", - entry + "file was missing final newline: {entry:?}" ); map_entry.entry(module).or_default().push_str(&text); println!("cargo:rerun-if-changed={}", entry.path().to_str().unwrap()); diff --git a/rslib/i18n/src/lib.rs b/rslib/i18n/src/lib.rs index 1d79198bf..bfd6f5ba2 100644 --- a/rslib/i18n/src/lib.rs +++ b/rslib/i18n/src/lib.rs @@ -130,7 +130,7 @@ fn get_bundle( ) -> Option> { let res = FluentResource::try_new(text.into()) .map_err(|e| { - println!("Unable to parse translations file: {:?}", e); + println!("Unable to parse translations file: {e:?}"); }) .ok()?; @@ -138,14 +138,14 @@ fn get_bundle( bundle .add_resource(res) .map_err(|e| { - println!("Duplicate key detected in translation file: {:?}", e); + println!("Duplicate key detected in translation file: {e:?}"); }) .ok()?; if !extra_text.is_empty() { match FluentResource::try_new(extra_text) { Ok(res) => bundle.add_resource_overriding(res), - Err((_res, e)) => println!("Unable to parse translations file: {:?}", e), + Err((_res, e)) => println!("Unable to parse translations file: {e:?}"), } } @@ -291,7 +291,7 @@ impl I18n { let mut errs = vec![]; let out = bundle.format_pattern(pat, args.as_ref(), &mut errs); if !errs.is_empty() { - println!("Error(s) in translation '{}': {:?}", key, errs); + println!("Error(s) in translation '{key}': {errs:?}"); } // clone so we can discard args return out.to_string().into(); diff --git a/rslib/i18n/typescript.rs b/rslib/i18n/typescript.rs index 0c2230338..ce30048e2 100644 --- a/rslib/i18n/typescript.rs +++ b/rslib/i18n/typescript.rs @@ -81,7 +81,7 @@ fn get_args(variables: &[Variable]) -> String { .iter() .map(|v| format!("\"{}\": args.{}", v.name, typescript_arg_name(&v.name))) .join(", "); - format!("{{{}}}", out) + format!("{{{out}}}") } } diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs index f9df5716f..36af62eeb 100644 --- a/rslib/i18n/write_strings.rs +++ b/rslib/i18n/write_strings.rs @@ -69,12 +69,6 @@ impl I18n { {var_build} self.translate("{key}"{out_args}) }}"#, - func = func, - key = key, - doc = doc, - in_args = in_args, - out_args = out_args, - var_build = var_build, ) .unwrap(); } @@ -103,9 +97,6 @@ fn build_vars(translation: &Translation) -> String { writeln!( buf, r#" args.set("{fluent_name}", {rust_name}{trailer});"#, - fluent_name = fluent_name, - rust_name = rust_name, - trailer = trailer, ) .unwrap(); } @@ -204,13 +195,7 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{", .unwrap(); for (module, contents) in modules { - writeln!( - buf, - r###" "{module}" => r##"{contents}"##,"###, - module = module, - contents = contents - ) - .unwrap(); + writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap(); } buf.push_str("};\n"); diff --git a/rslib/io/src/lib.rs b/rslib/io/src/lib.rs index c1d4c0205..cb44467e6 100644 --- a/rslib/io/src/lib.rs +++ b/rslib/io/src/lib.rs @@ -152,6 +152,34 @@ pub fn copy_file(src: impl AsRef, dst: impl AsRef) -> Result { }) } +/// Copy a file from src to dst if dst doesn't exist or if src is newer than +/// dst. Preserves the modification time from the source file. 
+pub fn copy_if_newer(src: impl AsRef, dst: impl AsRef) -> Result { + let src = src.as_ref(); + let dst = dst.as_ref(); + + let should_copy = if !dst.exists() { + true + } else { + let src_time = modified_time(src)?; + let dst_time = modified_time(dst)?; + src_time > dst_time + }; + + if should_copy { + copy_file(src, dst)?; + + // Preserve the modification time from the source file + let src_mtime = modified_time(src)?; + let times = FileTimes::new().set_modified(src_mtime); + set_file_times(dst, times)?; + + Ok(true) + } else { + Ok(false) + } +} + /// Like [read_file], but skips the section that is potentially locked by /// SQLite. pub fn read_locked_db_file(path: impl AsRef) -> Result> { @@ -188,6 +216,14 @@ pub fn metadata(path: impl AsRef) -> Result { }) } +/// Get the modification time of a file. +pub fn modified_time(path: impl AsRef) -> Result { + metadata(&path)?.modified().context(FileIoSnafu { + path: path.as_ref(), + op: FileOp::Metadata, + }) +} + pub fn new_tempfile() -> Result { NamedTempFile::new().context(FileIoSnafu { path: std::env::temp_dir(), diff --git a/rslib/linkchecker/tests/links.rs b/rslib/linkchecker/tests/links.rs index 48656ace4..2f39fbe31 100644 --- a/rslib/linkchecker/tests/links.rs +++ b/rslib/linkchecker/tests/links.rs @@ -52,7 +52,7 @@ impl CheckableUrl { fn anchor(&self) -> Cow { match *self { Self::HelpPage(page) => help_page_link_suffix(page).into(), - Self::String(s) => s.split('#').last().unwrap_or_default().into(), + Self::String(s) => s.split('#').next_back().unwrap_or_default().into(), } } } diff --git a/rslib/process/src/lib.rs b/rslib/process/src/lib.rs index d026dec34..dcf0703f6 100644 --- a/rslib/process/src/lib.rs +++ b/rslib/process/src/lib.rs @@ -57,6 +57,9 @@ pub trait CommandExt { fn ensure_success(&mut self) -> Result<&mut Self>; fn utf8_output(&mut self) -> Result; + fn ensure_spawn(&mut self) -> Result; + #[cfg(unix)] + fn ensure_exec(&mut self) -> Result<()>; } impl CommandExt for Command { @@ -94,6 +97,23 @@ impl CommandExt for Command { })?, }) } + + fn ensure_spawn(&mut self) -> Result { + self.spawn().with_context(|_| DidNotExecuteSnafu { + cmdline: get_cmdline(self), + }) + } + + #[cfg(unix)] + fn ensure_exec(&mut self) -> Result<()> { + use std::os::unix::process::CommandExt as UnixCommandExt; + let cmdline = get_cmdline(self); + let error = self.exec(); + Err(Error::DidNotExecute { + cmdline, + source: error, + }) + } } fn get_cmdline(arg: &mut Command) -> String { diff --git a/rslib/proto/python.rs b/rslib/proto/python.rs index 0ca2c15ea..a5adb4179 100644 --- a/rslib/proto/python.rs +++ b/rslib/proto/python.rs @@ -183,9 +183,9 @@ fn python_type(field: &FieldDescriptor, output: bool) -> String { }; if field.is_list() { if output { - format!("Sequence[{}]", kind) + format!("Sequence[{kind}]") } else { - format!("Iterable[{}]", kind) + format!("Iterable[{kind}]") } } else if field.is_map() { let map_kind = field.kind(); @@ -213,7 +213,6 @@ fn write_header(out: &mut impl Write) -> Result<()> { out.write_all( br#"# Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html -# pylint: skip-file from __future__ import annotations diff --git a/rslib/rust_interface.rs b/rslib/rust_interface.rs index a75100b5b..6861df7dc 100644 --- a/rslib/rust_interface.rs +++ b/rslib/rust_interface.rs @@ -263,7 +263,7 @@ impl MethodHelpers for Method { fn get_input_arg_with_label(&self) -> String { self.input_type() .as_ref() - .map(|t| format!("input: {}", t)) + .map(|t| 
format!("input: {t}")) .unwrap_or_default() } diff --git a/rslib/src/backend/config.rs b/rslib/src/backend/config.rs index 349f2d9af..b6e81ce2a 100644 --- a/rslib/src/backend/config.rs +++ b/rslib/src/backend/config.rs @@ -39,6 +39,7 @@ impl From for BoolKey { BoolKeyProto::RenderLatex => BoolKey::RenderLatex, BoolKeyProto::LoadBalancerEnabled => BoolKey::LoadBalancerEnabled, BoolKeyProto::FsrsShortTermWithStepsEnabled => BoolKey::FsrsShortTermWithStepsEnabled, + BoolKeyProto::FsrsLegacyEvaluate => BoolKey::FsrsLegacyEvaluate, } } } diff --git a/rslib/src/browser_table.rs b/rslib/src/browser_table.rs index 022708a80..85b0572d3 100644 --- a/rslib/src/browser_table.rs +++ b/rslib/src/browser_table.rs @@ -515,7 +515,7 @@ impl RowContext { return "".into(); }; if self.cards[0].is_undue_queue() { - format!("({})", due) + format!("({due})") } else { due.into() } @@ -623,7 +623,7 @@ impl RowContext { if self.notes_mode { let decks = self.cards.iter().map(|c| c.deck_id).unique().count(); if decks > 1 { - return format!("({})", decks); + return format!("({decks})"); } } let deck_name = self.deck.human_name(); diff --git a/rslib/src/card/mod.rs b/rslib/src/card/mod.rs index 8d7821e2c..598ac602b 100644 --- a/rslib/src/card/mod.rs +++ b/rslib/src/card/mod.rs @@ -185,10 +185,16 @@ impl Card { self.usn = usn; } + pub fn clear_fsrs_data(&mut self) { + self.memory_state = None; + self.desired_retention = None; + self.decay = None; + } + /// Caller must ensure provided deck exists and is not filtered. fn set_deck(&mut self, deck: DeckId) { self.remove_from_filtered_deck_restoring_queue(); - self.memory_state = None; + self.clear_fsrs_data(); self.deck_id = deck; } diff --git a/rslib/src/card_rendering/parser.rs b/rslib/src/card_rendering/parser.rs index 6f1cc662e..b124c069d 100644 --- a/rslib/src/card_rendering/parser.rs +++ b/rslib/src/card_rendering/parser.rs @@ -14,14 +14,14 @@ use nom::combinator::recognize; use nom::combinator::rest; use nom::combinator::success; use nom::combinator::value; -use nom::multi::fold_many0; use nom::multi::many0; use nom::sequence::delimited; use nom::sequence::pair; use nom::sequence::preceded; use nom::sequence::separated_pair; use nom::sequence::terminated; -use nom::sequence::tuple; +use nom::Input; +use nom::Parser; use super::CardNodes; use super::Directive; @@ -86,9 +86,12 @@ impl<'a> Directive<'a> { } /// Consume 0 or more of anything in " \t\r\n" after `parser`. 
-fn trailing_whitespace0<'parser, 's, P, O>(parser: P) -> impl FnMut(&'s str) -> IResult<'s, O> +fn trailing_whitespace0(parser: P) -> impl Parser where - P: FnMut(&'s str) -> IResult<'s, O> + 'parser, + I: Input, + ::Item: nom::AsChar, + E: nom::error::ParseError, + P: Parser, { terminated(parser, multispace0) } @@ -97,11 +100,11 @@ where fn is_not0<'parser, 'arr: 'parser, 's: 'parser>( arr: &'arr str, ) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser { - alt((is_not(arr), success(""))) + move |s| alt((is_not(arr), success(""))).parse(s) } fn node(s: &str) -> IResult { - alt((sound_node, tag_node, text_node))(s) + alt((sound_node, tag_node, text_node)).parse(s) } /// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or @@ -110,11 +113,11 @@ fn sound_node(s: &str) -> IResult { map( delimited(tag("[sound:"), is_not("]"), tag("]")), Node::SoundOrVideo, - )(s) + ) + .parse(s) } fn take_till_potential_tag_start(s: &str) -> IResult<&str> { - use nom::InputTake; // first char could be '[', but wasn't part of a node, so skip (eof ends parse) let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?; Ok(match after.find('[') { @@ -127,7 +130,7 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> { fn tag_node(s: &str) -> IResult { /// Match the start of an opening tag and return its name. fn name(s: &str) -> IResult<&str> { - preceded(tag("[anki:"), is_not("] \t\r\n"))(s) + preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s) } /// Return a parser to match an opening `name` tag and return its options. @@ -138,31 +141,35 @@ fn tag_node(s: &str) -> IResult { /// empty. fn options(s: &str) -> IResult> { fn key(s: &str) -> IResult<&str> { - is_not("] \t\r\n=")(s) + is_not("] \t\r\n=").parse(s) } fn val(s: &str) -> IResult<&str> { alt(( delimited(tag("\""), is_not0("\""), tag("\"")), is_not0("] \t\r\n\""), - ))(s) + )) + .parse(s) } - many0(trailing_whitespace0(separated_pair(key, tag("="), val)))(s) + many0(trailing_whitespace0(separated_pair(key, tag("="), val))).parse(s) } - delimited( - pair(tag("[anki:"), trailing_whitespace0(tag(name))), - options, - tag("]"), - ) + move |s| { + delimited( + pair(tag("[anki:"), trailing_whitespace0(tag(name))), + options, + tag("]"), + ) + .parse(s) + } } /// Return a parser to match a closing `name` tag. 
fn closing_parser<'parser, 'name: 'parser, 's: 'parser>( name: &'name str, ) -> impl FnMut(&'s str) -> IResult<'s, ()> + 'parser { - value((), tuple((tag("[/anki:"), tag(name), tag("]")))) + move |s| value((), (tag("[/anki:"), tag(name), tag("]"))).parse(s) } /// Return a parser to match and return anything until a closing `name` tag @@ -170,12 +177,13 @@ fn tag_node(s: &str) -> IResult { fn content_parser<'parser, 'name: 'parser, 's: 'parser>( name: &'name str, ) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser { - recognize(fold_many0( - pair(not(closing_parser(name)), take_till_potential_tag_start), - // we don't need to accumulate anything - || (), - |_, _| (), - )) + move |s| { + recognize(many0(pair( + not(closing_parser(name)), + take_till_potential_tag_start, + ))) + .parse(s) + } } let (_, tag_name) = name(s)?; @@ -185,11 +193,12 @@ fn tag_node(s: &str) -> IResult { closing_parser(tag_name), ), |(options, content)| Node::Directive(Directive::new(tag_name, options, content)), - )(s) + ) + .parse(s) } fn text_node(s: &str) -> IResult { - map(take_till_potential_tag_start, Node::Text)(s) + map(take_till_potential_tag_start, Node::Text).parse(s) } #[cfg(test)] diff --git a/rslib/src/card_rendering/tts/windows.rs b/rslib/src/card_rendering/tts/windows.rs index a53994c2e..5afb5cc2d 100644 --- a/rslib/src/card_rendering/tts/windows.rs +++ b/rslib/src/card_rendering/tts/windows.rs @@ -5,12 +5,13 @@ use std::fs::File; use std::io::Write; use anki_proto::card_rendering::all_tts_voices_response::TtsVoice; -use futures::executor::block_on; +use windows::core::Interface; use windows::core::HSTRING; use windows::Media::SpeechSynthesis::SpeechSynthesisStream; use windows::Media::SpeechSynthesis::SpeechSynthesizer; use windows::Media::SpeechSynthesis::VoiceInformation; use windows::Storage::Streams::DataReader; +use windows::Storage::Streams::IRandomAccessStream; use crate::error::windows::WindowsErrorDetails; use crate::error::windows::WindowsSnafu; @@ -45,7 +46,7 @@ fn find_voice(voice_id: &str) -> Result { fn to_hstring(text: &str) -> HSTRING { let utf16: Vec = text.encode_utf16().collect(); - HSTRING::from_wide(&utf16).expect("Strings are valid Unicode") + HSTRING::from_wide(&utf16) } fn synthesize_stream( @@ -64,16 +65,20 @@ fn synthesize_stream( details: WindowsErrorDetails::SettingRate(speed), })?; let async_op = synthesizer.SynthesizeTextToStreamAsync(&to_hstring(text))?; - let stream = block_on(async_op).context(WindowsSnafu { + let stream = async_op.get().context(WindowsSnafu { details: WindowsErrorDetails::Synthesizing, })?; Ok(stream) } fn write_stream_to_path(stream: SpeechSynthesisStream, path: &str) -> Result<()> { - let input_stream = stream.GetInputStreamAt(0)?; + let random_access_stream: IRandomAccessStream = stream.cast()?; + let input_stream = random_access_stream.GetInputStreamAt(0)?; let date_reader = DataReader::CreateDataReader(&input_stream)?; - let stream_size = stream.Size()?.try_into().or_invalid("stream too large")?; + let stream_size = random_access_stream + .Size()? 
+ .try_into() + .or_invalid("stream too large")?; date_reader.LoadAsync(stream_size)?; let mut file = File::create(path)?; write_reader_to_file(date_reader, &mut file, stream_size as usize) diff --git a/rslib/src/card_rendering/writer.rs b/rslib/src/card_rendering/writer.rs index 892cb9087..22fb1fb34 100644 --- a/rslib/src/card_rendering/writer.rs +++ b/rslib/src/card_rendering/writer.rs @@ -52,7 +52,7 @@ trait Write { } fn write_sound(&mut self, buf: &mut String, resource: &str) { - write!(buf, "[sound:{}]", resource).unwrap(); + write!(buf, "[sound:{resource}]").unwrap(); } fn write_directive(&mut self, buf: &mut String, directive: &Directive) { @@ -94,9 +94,9 @@ trait Write { fn write_directive_option(&mut self, buf: &mut String, key: &str, val: &str) { if val.contains([']', ' ', '\t', '\r', '\n']) { - write!(buf, " {}=\"{}\"", key, val).unwrap(); + write!(buf, " {key}=\"{val}\"").unwrap(); } else { - write!(buf, " {}={}", key, val).unwrap(); + write!(buf, " {key}={val}").unwrap(); } } @@ -158,7 +158,7 @@ impl Write for AvExtractor<'_> { fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) { if let Some(error) = directive.error(self.tr) { - write!(buf, "[{}]", error).unwrap(); + write!(buf, "[{error}]").unwrap(); return; } @@ -173,7 +173,7 @@ impl Write for AvExtractor<'_> { other_args: directive .options .iter() - .map(|(key, val)| format!("{}={}", key, val)) + .map(|(key, val)| format!("{key}={val}")) .collect(), }, )), @@ -204,7 +204,7 @@ impl AvPrettifier { impl Write for AvPrettifier { fn write_sound(&mut self, buf: &mut String, resource: &str) { - write!(buf, "🔉{}🔉", resource).unwrap(); + write!(buf, "🔉{resource}🔉").unwrap(); } fn write_tts_directive(&mut self, buf: &mut String, directive: &TtsDirective) { diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index f57d07ab0..02919dc12 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -15,6 +15,7 @@ use nom::bytes::complete::tag; use nom::bytes::complete::take_while; use nom::combinator::map; use nom::IResult; +use nom::Parser; use regex::Captures; use regex::Regex; @@ -24,6 +25,9 @@ use crate::latex::contains_latex; use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; +static CLOZE: LazyLock = + LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap()); + static MATHJAX: LazyLock = LazyLock::new(|| { Regex::new( r"(?xsi) @@ -72,7 +76,7 @@ fn tokenize(mut text: &str) -> impl Iterator { } fn close_cloze(text: &str) -> IResult<&str, Token> { - map(tag("}}"), |_| Token::CloseCloze)(text) + map(tag("}}"), |_| Token::CloseCloze).parse(text) } /// Match a run of text until an open/close marker is encountered. 
@@ -87,7 +91,7 @@ fn tokenize(mut text: &str) -> impl Iterator { // start with the no-match case let mut index = text.len(); for (idx, _) in text.char_indices() { - if other_token(&text[idx..]).is_ok() { + if other_token.parse(&text[idx..]).is_ok() { index = idx; break; } @@ -99,8 +103,9 @@ fn tokenize(mut text: &str) -> impl Iterator { if text.is_empty() { None } else { - let (remaining_text, token) = - alt((open_cloze, close_cloze, normal_text))(text).unwrap(); + let (remaining_text, token) = alt((open_cloze, close_cloze, normal_text)) + .parse(text) + .unwrap(); text = remaining_text; Some(token) } @@ -451,6 +456,10 @@ pub fn cloze_number_in_fields(fields: impl IntoIterator>) -> Ha set } +pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> { + CLOZE.replace_all(text, "$1") +} + fn strip_html_inside_mathjax(text: &str) -> Cow { MATHJAX.replace_all(text, |caps: &Captures| -> String { format!( @@ -608,6 +617,16 @@ mod test { ); } + #[test] + fn strip_clozes_regex() { + assert_eq!( + strip_clozes( + r#"The {{c1::moon::🌛}} {{c2::orbits::this hint has "::" in it}} the {{c3::🌏}}."# + ), + "The moon orbits the 🌏." + ); + } + #[test] fn mathjax_html() { // escaped angle brackets should be preserved diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index 39273b931..c76787cb0 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -41,6 +41,7 @@ pub enum BoolKey { WithDeckConfigs, Fsrs, FsrsHealthCheck, + FsrsLegacyEvaluate, LoadBalancerEnabled, FsrsShortTermWithStepsEnabled, #[strum(to_string = "normalize_note_text")] diff --git a/rslib/src/config/deck.rs b/rslib/src/config/deck.rs index d684534c0..a88ca61a7 100644 --- a/rslib/src/config/deck.rs +++ b/rslib/src/config/deck.rs @@ -41,5 +41,5 @@ impl Collection { } fn build_aux_deck_key(deck: DeckId, key: &str) -> String { - format!("_deck_{deck}_{key}", deck = deck, key = key) + format!("_deck_{deck}_{key}") } diff --git a/rslib/src/config/notetype.rs b/rslib/src/config/notetype.rs index 0d3fd9611..f9b70292d 100644 --- a/rslib/src/config/notetype.rs +++ b/rslib/src/config/notetype.rs @@ -32,7 +32,7 @@ impl Collection { }; Ok(get_aux_notetype_config_key( ntid, - &format!("{}_{}", key, ordinal), + &format!("{key}_{ordinal}"), )) } } @@ -70,5 +70,5 @@ impl Collection { } pub fn get_aux_notetype_config_key(ntid: NotetypeId, key: &str) -> String { - format!("_nt_{ntid}_{key}", ntid = ntid, key = key) + format!("_nt_{ntid}_{key}") } diff --git a/rslib/src/dbcheck.rs b/rslib/src/dbcheck.rs index ae960ab5c..f58a2184a 100644 --- a/rslib/src/dbcheck.rs +++ b/rslib/src/dbcheck.rs @@ -387,10 +387,10 @@ impl Collection { let mut basic = all_stock_notetypes(&self.tr).remove(0); let mut field = 3; while basic.fields.len() < field_count { - basic.add_field(format!("{}", field)); + basic.add_field(format!("{field}")); field += 1; } - basic.name = format!("db-check-{}-{}", stamp, field_count); + basic.name = format!("db-check-{stamp}-{field_count}"); let qfmt = basic.templates[0].config.q_format.clone(); let afmt = basic.templates[0].config.a_format.clone(); for n in 0..extra_cards_required { diff --git a/rslib/src/deckconfig/update.rs b/rslib/src/deckconfig/update.rs index 128e43770..9eb3b595f 100644 --- a/rslib/src/deckconfig/update.rs +++ b/rslib/src/deckconfig/update.rs @@ -74,6 +74,7 @@ impl Collection { apply_all_parent_limits: self.get_config_bool(BoolKey::ApplyAllParentLimits), fsrs: self.get_config_bool(BoolKey::Fsrs), fsrs_health_check: self.get_config_bool(BoolKey::FsrsHealthCheck), + fsrs_legacy_evaluate: 
self.get_config_bool(BoolKey::FsrsLegacyEvaluate), days_since_last_fsrs_optimize, }) } diff --git a/rslib/src/decks/addupdate.rs b/rslib/src/decks/addupdate.rs index be4cb34cd..eb9e242a5 100644 --- a/rslib/src/decks/addupdate.rs +++ b/rslib/src/decks/addupdate.rs @@ -93,7 +93,7 @@ impl Collection { pub(crate) fn recover_missing_deck(&mut self, did: DeckId, usn: Usn) -> Result<()> { let mut deck = Deck::new_normal(); deck.id = did; - deck.name = NativeDeckName::from_native_str(format!("recovered{}", did)); + deck.name = NativeDeckName::from_native_str(format!("recovered{did}")); deck.set_modified(usn); self.add_or_update_single_deck_with_existing_id(&mut deck, usn) } diff --git a/rslib/src/decks/remove.rs b/rslib/src/decks/remove.rs index befb770f8..a3bc78209 100644 --- a/rslib/src/decks/remove.rs +++ b/rslib/src/decks/remove.rs @@ -28,7 +28,7 @@ impl Collection { let card_count = match deck.kind { DeckKind::Normal(_) => self.delete_all_cards_in_normal_deck(deck.id)?, DeckKind::Filtered(_) => { - self.return_all_cards_in_filtered_deck(deck.id)?; + self.return_all_cards_in_filtered_deck(deck)?; 0 } }; diff --git a/rslib/src/error/db.rs b/rslib/src/error/db.rs index b44f771dc..a36cd1a60 100644 --- a/rslib/src/error/db.rs +++ b/rslib/src/error/db.rs @@ -67,7 +67,7 @@ impl From for AnkiError { } AnkiError::DbError { source: DbError { - info: format!("{:?}", err), + info: format!("{err:?}"), kind: DbErrorKind::Other, }, } @@ -88,7 +88,7 @@ impl From for AnkiError { } AnkiError::DbError { source: DbError { - info: format!("{:?}", err), + info: format!("{err:?}"), kind: DbErrorKind::Other, }, } @@ -101,7 +101,7 @@ impl DbError { DbErrorKind::Corrupt => self.info.clone(), // fixme: i18n DbErrorKind::Locked => "Anki already open, or media currently syncing.".into(), - _ => format!("{:?}", self), + _ => format!("{self:?}"), } } } diff --git a/rslib/src/error/invalid_input.rs b/rslib/src/error/invalid_input.rs index f62174578..970e1c692 100644 --- a/rslib/src/error/invalid_input.rs +++ b/rslib/src/error/invalid_input.rs @@ -26,7 +26,7 @@ impl InvalidInputError { pub fn context(&self) -> String { if let Some(source) = &self.source { - format!("{}", source) + format!("{source}") } else { String::new() } diff --git a/rslib/src/error/mod.rs b/rslib/src/error/mod.rs index 0da89e0ff..d2bd11cf0 100644 --- a/rslib/src/error/mod.rs +++ b/rslib/src/error/mod.rs @@ -149,13 +149,13 @@ impl AnkiError { } CardTypeErrorDetails::MissingCloze => tr.card_templates_missing_cloze(), }; - format!("{}
{}", header, details) + format!("{header}
{details}") } AnkiError::DbError { source } => source.message(tr), AnkiError::SearchError { source } => source.message(tr), AnkiError::ParseNumError => tr.errors_parse_number_fail().into(), AnkiError::FilteredDeckError { source } => source.message(tr), - AnkiError::InvalidRegex { info: source } => format!("

<pre>{}</pre>", source), + AnkiError::InvalidRegex { info: source } => format!("<pre>{source}</pre>
"), AnkiError::MultipleNotetypesSelected => tr.errors_multiple_notetypes_selected().into(), AnkiError::DatabaseCheckRequired => tr.errors_please_check_database().into(), AnkiError::MediaCheckRequired => tr.errors_please_check_media().into(), @@ -172,7 +172,7 @@ impl AnkiError { | AnkiError::InvalidServiceIndex | AnkiError::InvalidMethodIndex | AnkiError::UndoEmpty - | AnkiError::InvalidCertificateFormat => format!("{:?}", self), + | AnkiError::InvalidCertificateFormat => format!("{self:?}"), AnkiError::FileIoError { source } => source.message(), AnkiError::InvalidInput { source } => source.message(), AnkiError::NotFound { source } => source.message(tr), diff --git a/rslib/src/error/network.rs b/rslib/src/error/network.rs index 469978cff..eb293c359 100644 --- a/rslib/src/error/network.rs +++ b/rslib/src/error/network.rs @@ -68,7 +68,7 @@ impl AnkiError { impl From<&reqwest::Error> for AnkiError { fn from(err: &reqwest::Error) -> Self { let url = err.url().map(|url| url.as_str()).unwrap_or(""); - let str_err = format!("{}", err); + let str_err = format!("{err}"); // strip url from error to avoid exposing keys let info = str_err.replace(url, ""); @@ -205,7 +205,7 @@ impl NetworkError { NetworkErrorKind::Other => tr.network_other(), }; let details = tr.network_details(self.info.as_str()); - format!("{}\n\n{}", summary, details) + format!("{summary}\n\n{details}") } } @@ -226,7 +226,7 @@ impl From for AnkiError { } .into() } else { - AnkiError::sync_error(format!("{:?}", err), SyncErrorKind::Other) + AnkiError::sync_error(format!("{err:?}"), SyncErrorKind::Other) } } } diff --git a/rslib/src/image_occlusion/imagedata.rs b/rslib/src/image_occlusion/imagedata.rs index 9319d85c6..fdf8ea4fd 100644 --- a/rslib/src/image_occlusion/imagedata.rs +++ b/rslib/src/image_occlusion/imagedata.rs @@ -77,7 +77,7 @@ impl Collection { ) -> Result { let value = match self.get_image_occlusion_note_inner(note_id) { Ok(note) => Value::Note(note), - Err(err) => Value::Error(format!("{:?}", err)), + Err(err) => Value::Error(format!("{err:?}")), }; Ok(GetImageOcclusionNoteResponse { value: Some(value) }) } diff --git a/rslib/src/image_occlusion/imageocclusion.rs b/rslib/src/image_occlusion/imageocclusion.rs index 2ba83374f..1de86bf87 100644 --- a/rslib/src/image_occlusion/imageocclusion.rs +++ b/rslib/src/image_occlusion/imageocclusion.rs @@ -13,6 +13,7 @@ use nom::character::complete::char; use nom::error::ErrorKind; use nom::sequence::preceded; use nom::sequence::separated_pair; +use nom::Parser; fn unescape(text: &str) -> String { text.replace("\\:", ":") @@ -22,11 +23,12 @@ pub fn parse_image_cloze(text: &str) -> Option { if let Some((shape, _)) = text.split_once(':') { let mut properties = vec![]; let mut remaining = &text[shape.len()..]; - while let Ok((rem, (name, value))) = separated_pair::<_, _, _, _, (_, ErrorKind), _, _, _>( + while let Ok((rem, (name, value))) = separated_pair::<_, _, _, (_, ErrorKind), _, _, _>( preceded(tag(":"), is_not("=")), tag("="), escaped(is_not("\\:"), '\\', char(':')), - )(remaining) + ) + .parse(remaining) { remaining = rem; let value = unescape(value); @@ -96,7 +98,7 @@ pub fn get_image_cloze_data(text: &str) -> String { let Some((x, y)) = point_pair.split_once(',') else { continue; }; - write!(&mut point_str, "{},{} ", x, y).unwrap(); + write!(&mut point_str, "{x},{y} ").unwrap(); } // remove the trailing space point_str.pop(); diff --git a/rslib/src/import_export/package/colpkg/export.rs b/rslib/src/import_export/package/colpkg/export.rs index 9d85c19aa..20f25a1c1 100644 --- 
a/rslib/src/import_export/package/colpkg/export.rs +++ b/rslib/src/import_export/package/colpkg/export.rs @@ -146,8 +146,12 @@ pub(crate) fn export_collection( Ok(()) } -fn file_options_stored() -> FileOptions { - FileOptions::default().compression_method(CompressionMethod::Stored) +fn file_options_stored() -> FileOptions<'static, ()> { + FileOptions::<'static, ()>::default().compression_method(CompressionMethod::Stored) +} + +fn file_options_default() -> FileOptions<'static, ()> { + FileOptions::<'static, ()>::default() } fn write_collection( @@ -160,7 +164,7 @@ fn write_collection( zip.start_file(meta.collection_filename(), file_options_stored())?; zstd_copy(col, zip, size)?; } else { - zip.start_file(meta.collection_filename(), FileOptions::default())?; + zip.start_file(meta.collection_filename(), file_options_default())?; io::copy(col, zip)?; } Ok(()) diff --git a/rslib/src/import_export/package/colpkg/import.rs b/rslib/src/import_export/package/colpkg/import.rs index b8316c032..5ae8b4c8e 100644 --- a/rslib/src/import_export/package/colpkg/import.rs +++ b/rslib/src/import_export/package/colpkg/import.rs @@ -124,7 +124,7 @@ fn maybe_restore_media_file( Ok(()) } -fn restore_media_file(meta: &Meta, zip_file: &mut ZipFile, path: &Path) -> Result<()> { +fn restore_media_file(meta: &Meta, zip_file: &mut ZipFile, path: &Path) -> Result<()> { let mut tempfile = new_tempfile_in_parent_of(path)?; meta.copy(zip_file, &mut tempfile) .with_context(|_| FileIoSnafu { diff --git a/rslib/src/import_export/package/media.rs b/rslib/src/import_export/package/media.rs index 6b0b4c370..ff5bdf4d7 100644 --- a/rslib/src/import_export/package/media.rs +++ b/rslib/src/import_export/package/media.rs @@ -96,7 +96,10 @@ impl SafeMediaEntry { media_folder.join(&self.name) } - pub(super) fn fetch_file<'a>(&self, archive: &'a mut ZipArchive) -> Result> { + pub(super) fn fetch_file<'a>( + &self, + archive: &'a mut ZipArchive, + ) -> Result> { match archive.by_name(&self.index.to_string()) { Ok(file) => Ok(file), Err(err) => invalid_input!(err, "{} missing from archive", self.index), diff --git a/rslib/src/import_export/text/import.rs b/rslib/src/import_export/text/import.rs index f28c27ca3..4425bb386 100644 --- a/rslib/src/import_export/text/import.rs +++ b/rslib/src/import_export/text/import.rs @@ -274,6 +274,9 @@ impl<'a> Context<'a> { deck.name = NativeDeckName::from_human_name(name); self.col.add_deck_inner(&mut deck, self.usn)?; self.deck_ids.insert(deck.id, deck.human_name()); + if name.is_empty() { + self.deck_ids.default = Some(deck.id); + } Some(deck.id) } else { None diff --git a/rslib/src/latex.rs b/rslib/src/latex.rs index 3ebeebf8a..e5cb002ac 100644 --- a/rslib/src/latex.rs +++ b/rslib/src/latex.rs @@ -100,7 +100,7 @@ fn fname_for_latex(latex: &str, svg: bool) -> String { let ext = if svg { "svg" } else { "png" }; let csum = hex::encode(sha1_of_data(latex.as_bytes())); - format!("latex-{}.{}", csum, ext) + format!("latex-{csum}.{ext}") } fn image_link_for_fname(src: &str, fname: &str) -> String { @@ -122,11 +122,7 @@ mod test { assert_eq!( extract_latex("a[latex]one
<br>and<br>two[/latex]b", false), ( - format!( - "a\"one<br>and<br>two\"b", - fname - ) - .into(), + format!("a\"one<br>and<br>
two\"b").into(), vec![ExtractedLatex { fname: fname.into(), latex: "one\nand\ntwo".into() diff --git a/rslib/src/log.rs b/rslib/src/log.rs index 4fb4dcfaf..fedc597c4 100644 --- a/rslib/src/log.rs +++ b/rslib/src/log.rs @@ -69,8 +69,8 @@ fn maybe_rotate_log(path: &str) -> io::Result<()> { return Ok(()); } - let path2 = format!("{}.1", path); - let path3 = format!("{}.2", path); + let path2 = format!("{path}.1"); + let path3 = format!("{path}.2"); // if a rotated file already exists, rename it if let Err(e) = fs::rename(&path2, path3) { diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs index 9fd3bc85f..6974e2f81 100644 --- a/rslib/src/media/files.rs +++ b/rslib/src/media/files.rs @@ -218,7 +218,7 @@ fn truncate_filename(fname: &str, max_bytes: usize) -> Cow { let mut new_name = if ext.is_empty() { stem.to_string() } else { - format!("{}.{}", stem, ext) + format!("{stem}.{ext}") }; // make sure we don't break Windows by ending with a space or dot diff --git a/rslib/src/notes/mod.rs b/rslib/src/notes/mod.rs index 2b53321b9..932022e99 100644 --- a/rslib/src/notes/mod.rs +++ b/rslib/src/notes/mod.rs @@ -270,7 +270,7 @@ impl Note { self.fields .last_mut() .unwrap() - .push_str(&format!("; {}", last)); + .push_str(&format!("; {last}")); } } } diff --git a/rslib/src/notetype/cardgen.rs b/rslib/src/notetype/cardgen.rs index 8ee2717ba..8e03d8ee4 100644 --- a/rslib/src/notetype/cardgen.rs +++ b/rslib/src/notetype/cardgen.rs @@ -352,7 +352,7 @@ impl Collection { fn random_position(highest_position: u32) -> u32 { let mut rng = StdRng::seed_from_u64(highest_position as u64); - rng.gen_range(1..highest_position.max(1000)) + rng.random_range(1..highest_position.max(1000)) } #[cfg(test)] diff --git a/rslib/src/notetype/schema11.rs b/rslib/src/notetype/schema11.rs index 272456ab7..8d713cbe7 100644 --- a/rslib/src/notetype/schema11.rs +++ b/rslib/src/notetype/schema11.rs @@ -126,7 +126,7 @@ fn other_to_bytes(other: &HashMap) -> Vec { } else { serde_json::to_vec(other).unwrap_or_else(|e| { // theoretically should never happen - println!("serialization failed for {:?}: {}", other, e); + println!("serialization failed for {other:?}: {e}"); vec![] }) } @@ -140,7 +140,7 @@ pub(crate) fn parse_other_fields( Default::default() } else { let mut map: HashMap = serde_json::from_slice(bytes).unwrap_or_else(|e| { - println!("deserialization failed for other: {}", e); + println!("deserialization failed for other: {e}"); Default::default() }); map.retain(|k, _v| !reserved.contains(k)); diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs index f17f6b949..9b5df66d5 100644 --- a/rslib/src/notetype/stock.rs +++ b/rslib/src/notetype/stock.rs @@ -179,8 +179,8 @@ pub(crate) fn cloze(tr: &I18n) -> Notetype { let back_extra = tr.notetypes_back_extra_field(); config = nt.add_field(back_extra.as_ref()); config.tag = Some(ClozeField::BackExtra as u32); - let qfmt = format!("{{{{cloze:{}}}}}", text); - let afmt = format!("{}
<br>\n{{{{{}}}}}", qfmt, back_extra); + let qfmt = format!("{{{{cloze:{text}}}}}"); + let afmt = format!("{qfmt}<br>
\n{{{{{back_extra}}}}}"); nt.add_template(nt.name.clone(), qfmt, afmt); nt } diff --git a/rslib/src/scheduler/answering/learning.rs b/rslib/src/scheduler/answering/learning.rs index 80204e13f..3283dc3ee 100644 --- a/rslib/src/scheduler/answering/learning.rs +++ b/rslib/src/scheduler/answering/learning.rs @@ -87,7 +87,7 @@ impl CardStateUpdater { if secs >= upper_exclusive { secs } else { - rng.gen_range(secs..upper_exclusive) + rng.random_range(secs..upper_exclusive) } } else { secs diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index 02e5ee8d6..ce6720d3d 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -33,6 +33,7 @@ use crate::deckconfig::LeechAction; use crate::decks::Deck; use crate::prelude::*; use crate::scheduler::fsrs::memory_state::fsrs_item_for_memory_state; +use crate::scheduler::fsrs::memory_state::get_decay_from_params; use crate::scheduler::states::PreviewState; use crate::search::SearchNode; @@ -433,7 +434,9 @@ impl Collection { let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?; let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs); let fsrs_next_states = if fsrs_enabled { - let fsrs = FSRS::new(Some(config.fsrs_params()))?; + let params = config.fsrs_params(); + let fsrs = FSRS::new(Some(params))?; + card.decay = Some(get_decay_from_params(params)); if card.memory_state.is_none() && card.ctype != CardType::New { // Card has been moved or imported into an FSRS deck after params were set, // and will need its initial memory state to be calculated based on review @@ -630,7 +633,7 @@ fn get_fuzz_seed_for_id_and_reps(card_id: CardId, card_reps: u32) -> Option /// Return a fuzz factor from the range `0.0..1.0`, using the provided seed. /// None if seed is None. fn get_fuzz_factor(seed: Option) -> Option { - seed.map(|s| StdRng::seed_from_u64(s).gen_range(0.0..1.0)) + seed.map(|s| StdRng::seed_from_u64(s).random_range(0.0..1.0)) } #[cfg(test)] @@ -886,22 +889,20 @@ pub(crate) mod test { ) -> Result<()> { // Change due time to fake card answer_time, // works since answer_time is calculated as due - last_ivl - let update_due_string = format!("update cards set due={}", shift_due_time); + let update_due_string = format!("update cards set due={shift_due_time}"); col.storage.db.execute_batch(&update_due_string)?; col.clear_study_queues(); let current_card_state = current_state(col, post_answer.card_id); let state = match current_card_state { CardState::Normal(NormalState::Learning(state)) => state, - _ => panic!("State is not Normal: {:?}", current_card_state), + _ => panic!("State is not Normal: {current_card_state:?}"), }; let elapsed_secs = state.elapsed_secs as i32; // Give a 1 second leeway when the test runs on the off chance // that the test runs as a second rolls over. 
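The fuzz helpers above follow the rand 0.9 renames used throughout this patch: rand::thread_rng() becomes rand::rng(), gen_range becomes random_range, gen becomes random, and rand::distributions moves to rand::distr. A minimal sketch of the seeded pattern, assuming rand 0.9 (the fuzz_factor helper itself is illustrative):

    use rand::rngs::StdRng;
    use rand::Rng;
    use rand::SeedableRng;

    /// Deterministic fuzz factor in 0.0..1.0 from a stable seed (illustrative helper).
    fn fuzz_factor(seed: Option<u64>) -> Option<f64> {
        // rand 0.9: `gen_range` was renamed to `random_range`
        seed.map(|s| StdRng::seed_from_u64(s).random_range(0.0..1.0))
    }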
assert!( (elapsed_secs - expected_elapsed_secs).abs() <= 1, - "elapsed_secs: {} != expected_elapsed_secs: {}", - elapsed_secs, - expected_elapsed_secs + "elapsed_secs: {elapsed_secs} != expected_elapsed_secs: {expected_elapsed_secs}" ); Ok(()) diff --git a/rslib/src/scheduler/filtered/mod.rs b/rslib/src/scheduler/filtered/mod.rs index f1f3cc07d..331e54e5d 100644 --- a/rslib/src/scheduler/filtered/mod.rs +++ b/rslib/src/scheduler/filtered/mod.rs @@ -64,7 +64,8 @@ impl Collection { pub fn empty_filtered_deck(&mut self, did: DeckId) -> Result> { self.transact(Op::EmptyFilteredDeck, |col| { - col.return_all_cards_in_filtered_deck(did) + let deck = col.get_deck(did)?.or_not_found(did)?; + col.return_all_cards_in_filtered_deck(&deck) }) } @@ -78,8 +79,11 @@ impl Collection { } impl Collection { - pub(crate) fn return_all_cards_in_filtered_deck(&mut self, did: DeckId) -> Result<()> { - let cids = self.storage.all_cards_in_single_deck(did)?; + pub(crate) fn return_all_cards_in_filtered_deck(&mut self, deck: &Deck) -> Result<()> { + if !deck.is_filtered() { + return Err(FilteredDeckError::FilteredDeckRequired.into()); + } + let cids = self.storage.all_cards_in_single_deck(deck.id)?; self.return_cards_to_home_deck(&cids) } @@ -195,7 +199,7 @@ impl Collection { timing, }; - self.return_all_cards_in_filtered_deck(deck.id)?; + self.return_all_cards_in_filtered_deck(deck)?; self.build_filtered_deck(ctx) } @@ -214,14 +218,14 @@ impl Collection { .search_terms .get_mut(0) .unwrap(); - term1.search = format!("{} is:due", search); + term1.search = format!("{search} is:due"); let term2 = deck .filtered_mut() .unwrap() .search_terms .get_mut(1) .unwrap(); - term2.search = format!("{} is:new", search); + term2.search = format!("{search} is:new"); } } diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs index 787fa212d..425d8da69 100644 --- a/rslib/src/scheduler/fsrs/memory_state.rs +++ b/rslib/src/scheduler/fsrs/memory_state.rs @@ -30,6 +30,18 @@ pub struct ComputeMemoryProgress { pub total_cards: u32, } +/// Helper function to determine the appropriate decay value based on FSRS +/// parameters +pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 { + if params.is_empty() { + FSRS6_DEFAULT_DECAY // default decay for FSRS-6 + } else if params.len() < 21 { + FSRS5_DEFAULT_DECAY // default decay for FSRS-4.5 and FSRS-5 + } else { + params[20] + } +} + #[derive(Debug)] pub(crate) struct UpdateMemoryStateRequest { pub params: Params, @@ -77,15 +89,7 @@ impl Collection { .then(|| Rescheduler::new(self)) .transpose()?; let fsrs = FSRS::new(req.as_ref().map(|w| &w.params[..]).or(Some([].as_slice())))?; - let decay = req.as_ref().map(|w| { - if w.params.is_empty() { - FSRS6_DEFAULT_DECAY // default decay for FSRS-6 - } else if w.params.len() < 21 { - FSRS5_DEFAULT_DECAY // default decay for FSRS-4.5 and FSRS-5 - } else { - w.params[20] - } - }); + let decay = req.as_ref().map(|w| get_decay_from_params(&w.params)); let historical_retention = req.as_ref().map(|w| w.historical_retention); let items = fsrs_items_for_memory_states( &fsrs, @@ -101,77 +105,90 @@ impl Collection { progress.update(true, |state| state.current_cards = idx as u32 + 1)?; let mut card = self.storage.get_card(card_id)?.or_not_found(card_id)?; let original = card.clone(); - if let (Some(req), Some(item)) = (&req, item) { - card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?; + if let Some(req) = &req { + // Store decay and desired retention in the card so that add-ons, card info, + 
// stats and browser search/sorts don't need to access the deck config. + // Unlike memory states, scheduler doesn't use decay and dr stored in the card. card.desired_retention = desired_retention; card.decay = decay; - // if rescheduling - if let Some(reviews) = &last_revlog_info { - // and we have a last review time for the card - if let Some(last_info) = reviews.get(&card.id) { - if let Some(last_review) = &last_info.last_reviewed_at { - let days_elapsed = - timing.next_day_at.elapsed_days_since(*last_review) as i32; - // and the card's not new - if let Some(state) = &card.memory_state { - // or in (re)learning - if card.ctype == CardType::Review { - let deck = self - .get_deck(card.original_or_current_deck_id())? - .or_not_found(card.original_or_current_deck_id())?; - let deckconfig_id = deck.config_id().unwrap(); - // reschedule it - let original_interval = card.interval; - let interval = fsrs.next_interval( - Some(state.stability), - card.desired_retention.unwrap(), - 0, - ); - card.interval = rescheduler - .as_mut() - .and_then(|r| { - r.find_interval( - interval, - 1, - req.max_interval, - days_elapsed as u32, - deckconfig_id, - get_fuzz_seed(&card, true), - ) - }) - .unwrap_or_else(|| { - with_review_fuzz( - card.get_fuzz_factor(true), - interval, - 1, - req.max_interval, - ) - }); - let due = if card.original_due != 0 { - &mut card.original_due - } else { - &mut card.due - }; - let new_due = (timing.days_elapsed as i32) - days_elapsed - + card.interval as i32; - if let Some(rescheduler) = &mut rescheduler { - rescheduler.update_due_cnt_per_day( - *due, - new_due, - deckconfig_id, + if let Some(item) = item { + card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?; + // if rescheduling + if let Some(reviews) = &last_revlog_info { + // and we have a last review time for the card + if let Some(last_info) = reviews.get(&card.id) { + if let Some(last_review) = &last_info.last_reviewed_at { + let days_elapsed = + timing.next_day_at.elapsed_days_since(*last_review) as i32; + // and the card's not new + if let Some(state) = &card.memory_state { + // or in (re)learning + if card.ctype == CardType::Review { + let deck = self + .get_deck(card.original_or_current_deck_id())? 
+ .or_not_found(card.original_or_current_deck_id())?; + let deckconfig_id = deck.config_id().unwrap(); + // reschedule it + let original_interval = card.interval; + let interval = fsrs.next_interval( + Some(state.stability), + desired_retention.unwrap(), + 0, ); + card.interval = rescheduler + .as_mut() + .and_then(|r| { + r.find_interval( + interval, + 1, + req.max_interval, + days_elapsed as u32, + deckconfig_id, + get_fuzz_seed(&card, true), + ) + }) + .unwrap_or_else(|| { + with_review_fuzz( + card.get_fuzz_factor(true), + interval, + 1, + req.max_interval, + ) + }); + let due = if card.original_due != 0 { + &mut card.original_due + } else { + &mut card.due + }; + let new_due = (timing.days_elapsed as i32) + - days_elapsed + + card.interval as i32; + if let Some(rescheduler) = &mut rescheduler { + rescheduler.update_due_cnt_per_day( + *due, + new_due, + deckconfig_id, + ); + } + *due = new_due; + // Add a rescheduled revlog entry + self.log_rescheduled_review( + &card, + original_interval, + usn, + )?; } - *due = new_due; - // Add a rescheduled revlog entry - self.log_rescheduled_review(&card, original_interval, usn)?; } } } } + } else { + // clear memory states if item is None + card.memory_state = None; } } else { - card.memory_state = None; - card.desired_retention = None; + // clear FSRS data if FSRS is disabled + card.clear_fsrs_data(); } self.update_card_inner(&mut card, original, usn)?; } @@ -190,7 +207,9 @@ impl Collection { .or_not_found(conf_id)?; let desired_retention = config.inner.desired_retention; let historical_retention = config.inner.historical_retention; - let fsrs = FSRS::new(Some(config.fsrs_params()))?; + let params = config.fsrs_params(); + let decay = get_decay_from_params(params); + let fsrs = FSRS::new(Some(params))?; let revlog = self.revlog_for_srs(SearchNode::CardIds(card.id.to_string()))?; let item = fsrs_item_for_memory_state( &fsrs, @@ -204,13 +223,13 @@ impl Collection { Ok(ComputeMemoryStateResponse { state: card.memory_state.map(Into::into), desired_retention, + decay, }) } else { - card.memory_state = None; - card.desired_retention = None; Ok(ComputeMemoryStateResponse { state: None, desired_retention, + decay, }) } } diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs index 76bc206be..63bdebe79 100644 --- a/rslib/src/scheduler/fsrs/params.rs +++ b/rslib/src/scheduler/fsrs/params.rs @@ -299,6 +299,33 @@ impl Collection { .is_ok() })?) } + + pub fn evaluate_params_legacy( + &mut self, + params: &Params, + search: &str, + ignore_revlogs_before: TimestampMillis, + ) -> Result { + let timing = self.timing_today()?; + let mut anki_progress = self.new_progress_handler::(); + let guard = self.search_cards_into_table(search, SortMode::NoOrder)?; + let revlogs: Vec = guard + .col + .storage + .get_revlog_entries_for_searched_cards_in_card_order()?; + let (items, review_count) = + fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before); + anki_progress.state.reviews = review_count as u32; + let fsrs = FSRS::new(Some(params))?; + Ok(fsrs.evaluate(items, |ip| { + anki_progress + .update(false, |p| { + p.total_iterations = ip.total as u32; + p.current_iteration = ip.current as u32; + }) + .is_ok() + })?) 
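The get_decay_from_params helper introduced above centralises a lookup that was previously inlined: parameter sets with fewer than 21 values (FSRS-4.5/FSRS-5) fall back to the fixed FSRS-5 decay, a full 21-value FSRS-6 set carries its decay as the last parameter, and an empty set uses the FSRS-6 default. A minimal standalone illustration of that rule, assuming the decay constants are the ones the patch imports from the fsrs crate:

    use fsrs::FSRS5_DEFAULT_DECAY;
    use fsrs::FSRS6_DEFAULT_DECAY;

    /// Illustration of the decay selection mirrored by get_decay_from_params().
    fn decay_for(params: &[f32]) -> f32 {
        match params.len() {
            0 => FSRS6_DEFAULT_DECAY,           // no stored params yet
            n if n < 21 => FSRS5_DEFAULT_DECAY, // FSRS-4.5 / FSRS-5 parameter sets
            _ => params[20],                    // FSRS-6 stores decay in the 21st slot
        }
    }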
+ } } #[derive(Default, Clone, Copy, Debug)] diff --git a/rslib/src/scheduler/fsrs/simulator.rs b/rslib/src/scheduler/fsrs/simulator.rs index 133b3ff2c..34cc925d6 100644 --- a/rslib/src/scheduler/fsrs/simulator.rs +++ b/rslib/src/scheduler/fsrs/simulator.rs @@ -68,7 +68,7 @@ pub(crate) fn apply_load_balance_and_easy_days( sibling_modifier: 1.0, easy_days_modifier: easy_days_modifier[interval_index], }); - let fuzz_seed = rng.gen(); + let fuzz_seed = rng.random(); select_weighted_interval(intervals, Some(fuzz_seed)).unwrap() as f32 } @@ -106,7 +106,7 @@ fn create_review_priority_fn( // Random ordering Random => { - wrap!(move |_c, _w| rand::thread_rng().gen_range(0..deck_size) as i32) + wrap!(move |_c, _w| rand::rng().random_range(0..deck_size) as i32) } // Not implemented yet diff --git a/rslib/src/scheduler/new.rs b/rslib/src/scheduler/new.rs index 73b7abf5f..541d8d55e 100644 --- a/rslib/src/scheduler/new.rs +++ b/rslib/src/scheduler/new.rs @@ -127,7 +127,7 @@ fn nids_in_desired_order(cards: &[Card], order: NewCardDueOrder) -> Vec nids.sort_unstable(); } NewCardDueOrder::Random => { - nids.shuffle(&mut rand::thread_rng()); + nids.shuffle(&mut rand::rng()); } NewCardDueOrder::Preserve => unreachable!(), } diff --git a/rslib/src/scheduler/reviews.rs b/rslib/src/scheduler/reviews.rs index 0a7f32032..06390e57d 100644 --- a/rslib/src/scheduler/reviews.rs +++ b/rslib/src/scheduler/reviews.rs @@ -4,8 +4,8 @@ use std::collections::HashMap; use std::sync::LazyLock; -use rand::distributions::Distribution; -use rand::distributions::Uniform; +use rand::distr::Distribution; +use rand::distr::Uniform; use regex::Regex; use super::answering::CardAnswer; @@ -17,6 +17,7 @@ use crate::collection::Collection; use crate::config::StringKey; use crate::error::Result; use crate::prelude::*; +use crate::scheduler::timing::is_unix_epoch_timestamp; impl Card { /// Make card due in `days_from_today`. @@ -27,6 +28,7 @@ impl Card { fn set_due_date( &mut self, today: u32, + next_day_start: i64, days_from_today: u32, ease_factor: f32, force_reset: bool, @@ -34,8 +36,15 @@ impl Card { let new_due = (today + days_from_today) as i32; let fsrs_enabled = self.memory_state.is_some(); let new_interval = if fsrs_enabled { - self.interval - .saturating_add_signed(new_due - self.original_or_current_due()) + let due = self.original_or_current_due(); + let due_diff = if is_unix_epoch_timestamp(due) { + let offset = (due as i64 - next_day_start) / 86_400; + let due = (today as i64 + offset) as i32; + new_due - due + } else { + new_due - due + }; + self.interval.saturating_add_signed(due_diff) } else if force_reset || !matches!(self.ctype, CardType::Review | CardType::Relearn) { days_from_today.max(1) } else { @@ -114,8 +123,9 @@ impl Collection { let spec = parse_due_date_str(days)?; let usn = self.usn()?; let today = self.timing_today()?.days_elapsed; - let mut rng = rand::thread_rng(); - let distribution = Uniform::from(spec.min..=spec.max); + let next_day_start = self.timing_today()?.next_day_at.0; + let mut rng = rand::rng(); + let distribution = Uniform::new_inclusive(spec.min, spec.max).unwrap(); let mut decks_initial_ease: HashMap = HashMap::new(); self.transact(Op::SetDueDate, |col| { for mut card in col.all_cards_for_ids(cids, false)? 
{ @@ -137,7 +147,13 @@ impl Collection { }; let original = card.clone(); let days_from_today = distribution.sample(&mut rng); - card.set_due_date(today, days_from_today, ease_factor, spec.force_reset); + card.set_due_date( + today, + next_day_start, + days_from_today, + ease_factor, + spec.force_reset, + ); col.log_manually_scheduled_review(&card, original.interval, usn)?; col.update_card_inner(&mut card, original, usn)?; } @@ -228,26 +244,26 @@ mod test { let mut c = Card::new(NoteId(0), 0, DeckId(0), 0); // setting the due date of a new card will convert it - c.set_due_date(5, 2, 1.8, false); + c.set_due_date(5, 0, 2, 1.8, false); assert_eq!(c.ctype, CardType::Review); assert_eq!(c.due, 7); assert_eq!(c.interval, 2); assert_eq!(c.ease_factor, 1800); // reschedule it again the next day, shifting it from day 7 to day 9 - c.set_due_date(6, 3, 2.5, false); + c.set_due_date(6, 0, 3, 2.5, false); assert_eq!(c.due, 9); assert_eq!(c.interval, 2); assert_eq!(c.ease_factor, 1800); // interval doesn't change // we can bring cards forward too - return it to its original due date - c.set_due_date(6, 1, 2.4, false); + c.set_due_date(6, 0, 1, 2.4, false); assert_eq!(c.due, 7); assert_eq!(c.interval, 2); assert_eq!(c.ease_factor, 1800); // interval doesn't change // we can force the interval to be reset instead of shifted - c.set_due_date(6, 3, 2.3, true); + c.set_due_date(6, 0, 3, 2.3, true); assert_eq!(c.due, 9); assert_eq!(c.interval, 3); assert_eq!(c.ease_factor, 1800); // interval doesn't change @@ -259,7 +275,7 @@ mod test { c.original_deck_id = DeckId(1); c.due = -10000; c.queue = CardQueue::New; - c.set_due_date(6, 1, 2.2, false); + c.set_due_date(6, 0, 1, 2.2, false); assert_eq!(c.due, 7); assert_eq!(c.interval, 2); assert_eq!(c.ease_factor, 2200); @@ -271,7 +287,7 @@ mod test { c.ctype = CardType::Relearn; c.original_due = c.due; c.due = 12345678; - c.set_due_date(6, 10, 2.1, false); + c.set_due_date(6, 0, 10, 2.1, false); assert_eq!(c.due, 16); assert_eq!(c.interval, 2); assert_eq!(c.ease_factor, 2200); // interval doesn't change diff --git a/rslib/src/scheduler/service/mod.rs b/rslib/src/scheduler/service/mod.rs index 993fd1dbe..43d694e4f 100644 --- a/rslib/src/scheduler/service/mod.rs +++ b/rslib/src/scheduler/service/mod.rs @@ -307,6 +307,21 @@ impl crate::services::SchedulerService for Collection { }) } + fn evaluate_params_legacy( + &mut self, + input: scheduler::EvaluateParamsLegacyRequest, + ) -> Result { + let ret = self.evaluate_params_legacy( + &input.params, + &input.search, + input.ignore_revlogs_before_ms.into(), + )?; + Ok(scheduler::EvaluateParamsResponse { + log_loss: ret.log_loss, + rmse_bins: ret.rmse_bins, + }) + } + fn get_optimal_retention_parameters( &mut self, input: scheduler::GetOptimalRetentionParametersRequest, diff --git a/rslib/src/scheduler/states/load_balancer.rs b/rslib/src/scheduler/states/load_balancer.rs index 915b0b8b3..20b6936df 100644 --- a/rslib/src/scheduler/states/load_balancer.rs +++ b/rslib/src/scheduler/states/load_balancer.rs @@ -5,8 +5,8 @@ use std::collections::HashMap; use std::collections::HashSet; use chrono::Datelike; -use rand::distributions::Distribution; -use rand::distributions::WeightedIndex; +use rand::distr::weighted::WeightedIndex; +use rand::distr::Distribution; use rand::rngs::StdRng; use rand::SeedableRng; diff --git a/rslib/src/scheduler/timespan.rs b/rslib/src/scheduler/timespan.rs index c779d33bc..b015e3e1e 100644 --- a/rslib/src/scheduler/timespan.rs +++ b/rslib/src/scheduler/timespan.rs @@ -25,7 +25,7 @@ pub fn 
answer_button_time_collapsible(seconds: u32, collapse_secs: u32, tr: &I18 if seconds == 0 { tr.scheduling_end().into() } else if seconds < collapse_secs { - format!("<{}", string) + format!("<{string}") } else { string } diff --git a/rslib/src/search/builder.rs b/rslib/src/search/builder.rs index 452f4d832..a76af0560 100644 --- a/rslib/src/search/builder.rs +++ b/rslib/src/search/builder.rs @@ -219,7 +219,7 @@ impl From for SearchNode { impl From for SearchNode { fn from(n: NoteId) -> Self { - SearchNode::NoteIds(format!("{}", n)) + SearchNode::NoteIds(format!("{n}")) } } diff --git a/rslib/src/search/mod.rs b/rslib/src/search/mod.rs index 63096dad8..ff21bf4ca 100644 --- a/rslib/src/search/mod.rs +++ b/rslib/src/search/mod.rs @@ -240,7 +240,7 @@ impl Collection { } else { self.storage.setup_searched_cards_table()?; } - let sql = format!("insert into search_cids {}", sql); + let sql = format!("insert into search_cids {sql}"); let cards = self .storage @@ -307,7 +307,7 @@ impl Collection { let (sql, args) = writer.build_query(&top_node, mode.required_table())?; self.storage.setup_searched_notes_table()?; - let sql = format!("insert into search_nids {}", sql); + let sql = format!("insert into search_nids {sql}"); let notes = self .storage diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index 93df4ea08..ae166ef54 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -19,6 +19,7 @@ use nom::error::ErrorKind as NomErrorKind; use nom::multi::many0; use nom::sequence::preceded; use nom::sequence::separated_pair; +use nom::Parser; use regex::Captures; use regex::Regex; @@ -93,6 +94,7 @@ pub enum SearchNode { WholeCollection, Regex(String), NoCombining(String), + StripClozes(String), WordBoundary(String), CustomData(String), Preset(String), @@ -202,18 +204,19 @@ fn group_inner(input: &str) -> IResult> { } fn whitespace0(s: &str) -> IResult> { - many0(one_of(" \u{3000}"))(s) + many0(one_of(" \u{3000}")).parse(s) } /// Optional leading space, then a (negated) group or text fn node(s: &str) -> IResult { - preceded(whitespace0, alt((negated_node, group, text)))(s) + preceded(whitespace0, alt((negated_node, group, text))).parse(s) } fn negated_node(s: &str) -> IResult { map(preceded(char('-'), alt((group, text))), |node| { Node::Not(Box::new(node)) - })(s) + }) + .parse(s) } /// One or more nodes surrounded by brackets, eg (one OR two) @@ -233,7 +236,7 @@ fn group(s: &str) -> IResult { /// Either quoted or unquoted text fn text(s: &str) -> IResult { - alt((quoted_term, partially_quoted_term, unquoted_term))(s) + alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s) } /// Quoted text, including the outer double quotes. 
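The parser changes in this file (and the matching ones in template.rs further down) follow the nom 8 migration: combinators such as alt, map, preceded and separated_pair now return Parser implementations rather than plain functions, so call sites import nom::Parser and drive them with .parse(input). A minimal sketch of the pattern, assuming nom 8 (the keyword parser itself is illustrative):

    use nom::branch::alt;
    use nom::bytes::complete::tag;
    use nom::IResult;
    use nom::Parser; // brings `.parse()` into scope under nom 8

    fn keyword(s: &str) -> IResult<&str, &str> {
        // nom 7 allowed calling the combinator directly: alt((tag("deck"), tag("note")))(s)
        // nom 8 returns an `impl Parser`, which is driven via `.parse(s)`
        alt((tag("deck"), tag("note"))).parse(s)
    }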
@@ -248,7 +251,8 @@ fn partially_quoted_term(s: &str) -> IResult { escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")), char(':'), quoted_term_str, - )(s)?; + ) + .parse(s)?; Ok(( remaining, Node::Search(search_node_for_text_with_argument(key, val)?), @@ -274,7 +278,7 @@ fn unquoted_term(s: &str) -> IResult { Err(parse_failure( s, FailKind::UnknownEscape { - provided: format!("\\{}", c), + provided: format!("\\{c}"), }, )) } else if "\"() \u{3000}".contains(s.chars().next().unwrap()) { @@ -296,7 +300,7 @@ fn unquoted_term(s: &str) -> IResult { fn quoted_term_str(s: &str) -> IResult<&str> { let (opened, _) = char('"')(s)?; if let Ok((tail, inner)) = - escaped::<_, ParseError, _, _, _, _>(is_not(r#""\"#), '\\', anychar)(opened) + escaped::<_, ParseError, _, _>(is_not(r#""\"#), '\\', anychar).parse(opened) { if let Ok((remaining, _)) = char::<_, ParseError>('"')(tail) { Ok((remaining, inner)) @@ -321,7 +325,8 @@ fn search_node_for_text(s: &str) -> ParseResult { // leading : is only possible error for well-formed input let (tail, head) = verify(escaped(is_not(r":\"), '\\', anychar), |t: &str| { !t.is_empty() - })(s) + }) + .parse(s) .map_err(|_: nom::Err| parse_failure(s, FailKind::MissingKey))?; if tail.is_empty() { Ok(SearchNode::UnqualifiedText(unescape(head)?)) @@ -354,6 +359,7 @@ fn search_node_for_text_with_argument<'a>( "cid" => SearchNode::CardIds(check_id_list(val, key)?.into()), "re" => SearchNode::Regex(unescape_quotes(val)), "nc" => SearchNode::NoCombining(unescape(val)?), + "sc" => SearchNode::StripClozes(unescape(val)?), "w" => SearchNode::WordBoundary(unescape(val)?), "dupe" => parse_dupe(val)?, "has-cd" => SearchNode::CustomData(unescape(val)?), @@ -407,7 +413,7 @@ fn parse_resched(s: &str) -> ParseResult { /// eg prop:ivl>3, prop:ease!=2.5 fn parse_prop(prop_clause: &str) -> ParseResult { - let (tail, prop) = alt::<_, _, ParseError, _>(( + let (tail, prop) = alt(( tag("ivl"), tag("due"), tag("reps"), @@ -421,8 +427,9 @@ fn parse_prop(prop_clause: &str) -> ParseResult { tag("r"), recognize(preceded(tag("cdn:"), alphanumeric1)), recognize(preceded(tag("cds:"), alphanumeric1)), - ))(prop_clause) - .map_err(|_| { + )) + .parse(prop_clause) + .map_err(|_: nom::Err| { parse_failure( prop_clause, FailKind::InvalidPropProperty { @@ -431,15 +438,16 @@ fn parse_prop(prop_clause: &str) -> ParseResult { ) })?; - let (num, operator) = alt::<_, _, ParseError, _>(( + let (num, operator) = alt(( tag("<="), tag(">="), tag("!="), tag("="), tag("<"), tag(">"), - ))(tail) - .map_err(|_| { + )) + .parse(tail) + .map_err(|_: nom::Err| { parse_failure( prop_clause, FailKind::InvalidPropOperator { @@ -631,7 +639,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> { s, // id lists are undocumented, so no translation FailKind::Other { - info: Some(format!("expected only digits and commas in {}:", context)), + info: Some(format!("expected only digits and commas in {context}:")), }, )) } @@ -1104,19 +1112,19 @@ mod test { for term in &["added", "edited", "rated", "resched"] { assert!(matches!( - failkind(&format!("{}:1.1", term)), + failkind(&format!("{term}:1.1")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("{}:-1", term)), + failkind(&format!("{term}:-1")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("{}:", term)), + failkind(&format!("{term}:")), SearchErrorKind::InvalidPositiveWholeNumber { .. 
} )); assert!(matches!( - failkind(&format!("{}:foo", term)), + failkind(&format!("{term}:foo")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); } @@ -1217,19 +1225,19 @@ mod test { for term in &["ivl", "reps", "lapses", "pos"] { assert!(matches!( - failkind(&format!("prop:{}>", term)), + failkind(&format!("prop:{term}>")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{}=0.5", term)), + failkind(&format!("prop:{term}=0.5")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{}!=-1", term)), + failkind(&format!("prop:{term}!=-1")), SearchErrorKind::InvalidPositiveWholeNumber { .. } )); assert!(matches!( - failkind(&format!("prop:{} { self.write_unqualified( text, self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + false, )? } SearchNode::SingleField { field, text, is_re } => { @@ -143,7 +145,14 @@ impl SqlWriter<'_> { self.write_dupe(*notetype_id, &self.norm_note(text))? } SearchNode::Regex(re) => self.write_regex(&self.norm_note(re), false)?, - SearchNode::NoCombining(text) => self.write_unqualified(&self.norm_note(text), true)?, + SearchNode::NoCombining(text) => { + self.write_unqualified(&self.norm_note(text), true, false)? + } + SearchNode::StripClozes(text) => self.write_unqualified( + &self.norm_note(text), + self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), + true, + )?, SearchNode::WordBoundary(text) => self.write_word_boundary(&self.norm_note(text))?, // other @@ -158,13 +167,12 @@ impl SqlWriter<'_> { }, SearchNode::Deck(deck) => self.write_deck(&norm(deck))?, SearchNode::NotetypeId(ntid) => { - write!(self.sql, "n.mid = {}", ntid).unwrap(); + write!(self.sql, "n.mid = {ntid}").unwrap(); } SearchNode::DeckIdsWithoutChildren(dids) => { write!( self.sql, - "c.did in ({}) or (c.odid != 0 and c.odid in ({}))", - dids, dids + "c.did in ({dids}) or (c.odid != 0 and c.odid in ({dids}))" ) .unwrap(); } @@ -175,13 +183,13 @@ impl SqlWriter<'_> { SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re), SearchNode::State(state) => self.write_state(state)?, SearchNode::Flag(flag) => { - write!(self.sql, "(c.flags & 7) == {}", flag).unwrap(); + write!(self.sql, "(c.flags & 7) == {flag}").unwrap(); } SearchNode::NoteIds(nids) => { write!(self.sql, "{} in ({})", self.note_id_column(), nids).unwrap(); } SearchNode::CardIds(cids) => { - write!(self.sql, "c.id in ({})", cids).unwrap(); + write!(self.sql, "c.id in ({cids})").unwrap(); } SearchNode::Property { operator, kind } => self.write_prop(operator, kind)?, SearchNode::CustomData(key) => self.write_custom_data(key)?, @@ -191,7 +199,12 @@ impl SqlWriter<'_> { Ok(()) } - fn write_unqualified(&mut self, text: &str, no_combining: bool) -> Result<()> { + fn write_unqualified( + &mut self, + text: &str, + no_combining: bool, + strip_clozes: bool, + ) -> Result<()> { let text = to_sql(text); let text = if no_combining { without_combining(&text) @@ -199,21 +212,41 @@ impl SqlWriter<'_> { text }; // implicitly wrap in % - let text = format!("%{}%", text); + let text = format!("%{text}%"); self.args.push(text); let arg_idx = self.args.len(); - let sfld_expr = if no_combining { - "coalesce(without_combining(cast(n.sfld as text)), n.sfld)" + let mut process_text_flags = ProcessTextFlags::empty(); + if no_combining { + process_text_flags.insert(ProcessTextFlags::NoCombining); + } + if strip_clozes { + process_text_flags.insert(ProcessTextFlags::StripClozes); + } + + let (sfld_expr, flds_expr) = if 
!process_text_flags.is_empty() { + let bits = process_text_flags.bits(); + ( + Cow::from(format!( + "coalesce(process_text(cast(n.sfld as text), {bits}), n.sfld)" + )), + Cow::from(format!("coalesce(process_text(n.flds, {bits}), n.flds)")), + ) } else { - "n.sfld" - }; - let flds_expr = if no_combining { - "coalesce(without_combining(n.flds), n.flds)" - } else { - "n.flds" + (Cow::from("n.sfld"), Cow::from("n.flds")) }; + if strip_clozes { + let cloze_notetypes_only_clause = self + .col + .get_all_notetypes()? + .iter() + .filter(|nt| nt.is_cloze()) + .map(|nt| format!("n.mid = {}", nt.id)) + .join(" or "); + write!(self.sql, "({cloze_notetypes_only_clause}) and ").unwrap(); + } + if let Some(field_indicies_by_notetype) = self.included_fields_by_notetype()? { let field_idx_str = format!("' || ?{arg_idx} || '"); let other_idx_str = "%".to_string(); @@ -279,7 +312,7 @@ impl SqlWriter<'_> { text => { write!(self.sql, "n.tags regexp ?").unwrap(); let re = &to_custom_re(text, r"\S"); - self.args.push(format!("(?i).* {}(::| ).*", re)); + self.args.push(format!("(?i).* {re}(::| ).*")); } } } @@ -293,10 +326,10 @@ impl SqlWriter<'_> { write!(self.sql, "c.id in (select cid from revlog where id").unwrap(); match op { - ">" => write!(self.sql, " >= {}", target_cutoff_ms), - ">=" => write!(self.sql, " >= {}", day_before_cutoff_ms), - "<" => write!(self.sql, " < {}", day_before_cutoff_ms), - "<=" => write!(self.sql, " < {}", target_cutoff_ms), + ">" => write!(self.sql, " >= {target_cutoff_ms}"), + ">=" => write!(self.sql, " >= {day_before_cutoff_ms}"), + "<" => write!(self.sql, " < {day_before_cutoff_ms}"), + "<=" => write!(self.sql, " < {target_cutoff_ms}"), "=" => write!( self.sql, " between {} and {}", @@ -314,7 +347,7 @@ impl SqlWriter<'_> { .unwrap(); match ease { - RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {})", u), + RatingKind::AnswerButton(u) => write!(self.sql, " and ease = {u})"), RatingKind::AnyAnswerButton => write!(self.sql, " and ease > 0)"), RatingKind::ManualReschedule => write!(self.sql, " and ease = 0)"), } @@ -356,9 +389,9 @@ impl SqlWriter<'_> { pos = pos ) .unwrap(), - PropertyKind::Interval(ivl) => write!(self.sql, "ivl {} {}", op, ivl).unwrap(), - PropertyKind::Reps(reps) => write!(self.sql, "reps {} {}", op, reps).unwrap(), - PropertyKind::Lapses(days) => write!(self.sql, "lapses {} {}", op, days).unwrap(), + PropertyKind::Interval(ivl) => write!(self.sql, "ivl {op} {ivl}").unwrap(), + PropertyKind::Reps(reps) => write!(self.sql, "reps {op} {reps}").unwrap(), + PropertyKind::Lapses(days) => write!(self.sql, "lapses {op} {days}").unwrap(), PropertyKind::Ease(ease) => { write!(self.sql, "factor {} {}", op, (ease * 1000.0) as u32).unwrap() } @@ -474,7 +507,7 @@ impl SqlWriter<'_> { }; // convert to a regex that includes child decks - self.args.push(format!("(?i)^{}($|\x1f)", native_deck)); + self.args.push(format!("(?i)^{native_deck}($|\x1f)")); let arg_idx = self.args.len(); self.sql.push_str(&format!(concat!( "(c.did in (select id from decks where name regexp ?{n})", @@ -491,7 +524,7 @@ impl SqlWriter<'_> { let ids = self.col.storage.deck_id_with_children(&parent)?; let mut buf = String::new(); ids_to_string(&mut buf, &ids); - write!(self.sql, "c.did in {}", buf,).unwrap(); + write!(self.sql, "c.did in {buf}",).unwrap(); } else { self.sql.push_str("false") } @@ -502,7 +535,7 @@ impl SqlWriter<'_> { fn write_template(&mut self, template: &TemplateKind) { match template { TemplateKind::Ordinal(n) => { - write!(self.sql, "c.ord = {}", n).unwrap(); + 
write!(self.sql, "c.ord = {n}").unwrap(); } TemplateKind::Name(name) => { if is_glob(name) { @@ -550,7 +583,7 @@ impl SqlWriter<'_> { } fn write_all_fields_regexp(&mut self, val: &str) { - self.args.push(format!("(?i){}", val)); + self.args.push(format!("(?i){val}")); write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap(); } @@ -566,7 +599,7 @@ impl SqlWriter<'_> { return Ok(()); } - self.args.push(format!("(?i){}", val)); + self.args.push(format!("(?i){val}")); let arg_idx = self.args.len(); let all_notetype_clauses = field_indicies_by_notetype @@ -775,13 +808,13 @@ impl SqlWriter<'_> { fn write_added(&mut self, days: u32) -> Result<()> { let cutoff = self.previous_day_cutoff(days)?.as_millis(); - write!(self.sql, "c.id > {}", cutoff).unwrap(); + write!(self.sql, "c.id > {cutoff}").unwrap(); Ok(()) } fn write_edited(&mut self, days: u32) -> Result<()> { let cutoff = self.previous_day_cutoff(days)?; - write!(self.sql, "n.mod > {}", cutoff).unwrap(); + write!(self.sql, "n.mod > {cutoff}").unwrap(); Ok(()) } @@ -804,16 +837,19 @@ impl SqlWriter<'_> { fn write_regex(&mut self, word: &str, no_combining: bool) -> Result<()> { let flds_expr = if no_combining { - "coalesce(without_combining(n.flds), n.flds)" + Cow::from(format!( + "coalesce(process_text(n.flds, {}), n.flds)", + ProcessTextFlags::NoCombining.bits() + )) } else { - "n.flds" + Cow::from("n.flds") }; let word = if no_combining { without_combining(word) } else { std::borrow::Cow::Borrowed(word) }; - self.args.push(format!(r"(?i){}", word)); + self.args.push(format!(r"(?i){word}")); let arg_idx = self.args.len(); if let Some(field_indices_by_notetype) = self.included_fields_for_unqualified_regex()? { let notetype_clause = |ctx: &UnqualifiedRegexSearchContext| -> String { @@ -996,6 +1032,7 @@ impl SearchNode { SearchNode::Duplicates { .. 
} => RequiredTable::Notes, SearchNode::Regex(_) => RequiredTable::Notes, SearchNode::NoCombining(_) => RequiredTable::Notes, + SearchNode::StripClozes(_) => RequiredTable::Notes, SearchNode::WordBoundary(_) => RequiredTable::Notes, SearchNode::NotetypeId(_) => RequiredTable::Notes, SearchNode::Notetype(_) => RequiredTable::Notes, @@ -1300,6 +1337,9 @@ c.odue != 0 then c.odue else c.due end) != {days}) or (c.queue in (1,4) and "((c.did in (1) or c.odid in (1)))" ); assert_eq!(&s(ctx, "preset:typo").0, "(false)"); + + // strip clozes + assert_eq!(&s(ctx, "sc:abcdef").0, "((n.mid = 1581236385343) and (coalesce(process_text(cast(n.sfld as text), 2), n.sfld) like ?1 escape '\\' or coalesce(process_text(n.flds, 2), n.flds) like ?1 escape '\\'))"); } #[test] diff --git a/rslib/src/search/writer.rs b/rslib/src/search/writer.rs index 600a18fd6..3bbe6fd0a 100644 --- a/rslib/src/search/writer.rs +++ b/rslib/src/search/writer.rs @@ -70,30 +70,31 @@ fn write_search_node(node: &SearchNode) -> String { match node { UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")), SingleField { field, text, is_re } => write_single_field(field, text, *is_re), - AddedInDays(u) => format!("added:{}", u), - EditedInDays(u) => format!("edited:{}", u), - IntroducedInDays(u) => format!("introduced:{}", u), + AddedInDays(u) => format!("added:{u}"), + EditedInDays(u) => format!("edited:{u}"), + IntroducedInDays(u) => format!("introduced:{u}"), CardTemplate(t) => write_template(t), - Deck(s) => maybe_quote(&format!("deck:{}", s)), - DeckIdsWithoutChildren(s) => format!("did:{}", s), + Deck(s) => maybe_quote(&format!("deck:{s}")), + DeckIdsWithoutChildren(s) => format!("did:{s}"), // not exposed on the GUI end DeckIdWithChildren(_) => "".to_string(), - NotetypeId(NotetypeIdType(i)) => format!("mid:{}", i), - Notetype(s) => maybe_quote(&format!("note:{}", s)), + NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"), + Notetype(s) => maybe_quote(&format!("note:{s}")), Rated { days, ease } => write_rated(days, ease), Tag { tag, is_re } => write_single_field("tag", tag, *is_re), Duplicates { notetype_id, text } => write_dupe(notetype_id, text), State(k) => write_state(k), - Flag(u) => format!("flag:{}", u), - NoteIds(s) => format!("nid:{}", s), - CardIds(s) => format!("cid:{}", s), + Flag(u) => format!("flag:{u}"), + NoteIds(s) => format!("nid:{s}"), + CardIds(s) => format!("cid:{s}"), Property { operator, kind } => write_property(operator, kind), WholeCollection => "deck:*".to_string(), - Regex(s) => maybe_quote(&format!("re:{}", s)), - NoCombining(s) => maybe_quote(&format!("nc:{}", s)), - WordBoundary(s) => maybe_quote(&format!("w:{}", s)), - CustomData(k) => maybe_quote(&format!("has-cd:{}", k)), - Preset(s) => maybe_quote(&format!("preset:{}", s)), + Regex(s) => maybe_quote(&format!("re:{s}")), + NoCombining(s) => maybe_quote(&format!("nc:{s}")), + StripClozes(s) => maybe_quote(&format!("sc:{s}")), + WordBoundary(s) => maybe_quote(&format!("w:{s}")), + CustomData(k) => maybe_quote(&format!("has-cd:{k}")), + Preset(s) => maybe_quote(&format!("preset:{s}")), } } @@ -128,23 +129,23 @@ fn write_single_field(field: &str, text: &str, is_re: bool) -> String { fn write_template(template: &TemplateKind) -> String { match template { TemplateKind::Ordinal(u) => format!("card:{}", u + 1), - TemplateKind::Name(s) => maybe_quote(&format!("card:{}", s)), + TemplateKind::Name(s) => maybe_quote(&format!("card:{s}")), } } fn write_rated(days: &u32, ease: &RatingKind) -> String { use RatingKind::*; match ease { - AnswerButton(n) => 
format!("rated:{}:{}", days, n), - AnyAnswerButton => format!("rated:{}", days), - ManualReschedule => format!("resched:{}", days), + AnswerButton(n) => format!("rated:{days}:{n}"), + AnyAnswerButton => format!("rated:{days}"), + ManualReschedule => format!("resched:{days}"), } } /// Escape double quotes and backslashes: \" fn write_dupe(notetype_id: &NotetypeId, text: &str) -> String { let esc = text.replace('\\', r"\\"); - maybe_quote(&format!("dupe:{},{}", notetype_id, esc)) + maybe_quote(&format!("dupe:{notetype_id},{esc}")) } fn write_state(kind: &StateKind) -> String { @@ -167,19 +168,19 @@ fn write_state(kind: &StateKind) -> String { fn write_property(operator: &str, kind: &PropertyKind) -> String { use PropertyKind::*; match kind { - Due(i) => format!("prop:due{}{}", operator, i), - Interval(u) => format!("prop:ivl{}{}", operator, u), - Reps(u) => format!("prop:reps{}{}", operator, u), - Lapses(u) => format!("prop:lapses{}{}", operator, u), - Ease(f) => format!("prop:ease{}{}", operator, f), - Position(u) => format!("prop:pos{}{}", operator, u), - Stability(u) => format!("prop:s{}{}", operator, u), - Difficulty(u) => format!("prop:d{}{}", operator, u), - Retrievability(u) => format!("prop:r{}{}", operator, u), + Due(i) => format!("prop:due{operator}{i}"), + Interval(u) => format!("prop:ivl{operator}{u}"), + Reps(u) => format!("prop:reps{operator}{u}"), + Lapses(u) => format!("prop:lapses{operator}{u}"), + Ease(f) => format!("prop:ease{operator}{f}"), + Position(u) => format!("prop:pos{operator}{u}"), + Stability(u) => format!("prop:s{operator}{u}"), + Difficulty(u) => format!("prop:d{operator}{u}"), + Retrievability(u) => format!("prop:r{operator}{u}"), Rated(u, ease) => match ease { - RatingKind::AnswerButton(val) => format!("prop:rated{}{}:{}", operator, u, val), - RatingKind::AnyAnswerButton => format!("prop:rated{}{}", operator, u), - RatingKind::ManualReschedule => format!("prop:resched{}{}", operator, u), + RatingKind::AnswerButton(val) => format!("prop:rated{operator}{u}:{val}"), + RatingKind::AnyAnswerButton => format!("prop:rated{operator}{u}"), + RatingKind::ManualReschedule => format!("prop:resched{operator}{u}"), }, CustomDataNumber { key, value } => format!("prop:cdn:{key}{operator}{value}"), CustomDataString { key, value } => { diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs index 38cf5ef0f..0205aef0d 100644 --- a/rslib/src/storage/card/mod.rs +++ b/rslib/src/storage/card/mod.rs @@ -829,8 +829,7 @@ impl fmt::Display for ReviewOrderSubclause { ReviewOrderSubclause::RetrievabilitySm2 { today, order } => { temp_string = format!( // - (elapsed days+0.001)/(scheduled interval) - "-(1 + cast({today}-due+0.001 as real)/ivl) {order}", - today = today + "-(1 + cast({today}-due+0.001 as real)/ivl) {order}" ); &temp_string } @@ -844,7 +843,7 @@ impl fmt::Display for ReviewOrderSubclause { ReviewOrderSubclause::Added => "nid asc, ord asc", ReviewOrderSubclause::ReverseAdded => "nid desc, ord asc", }; - write!(f, "{}", clause) + write!(f, "{clause}") } } diff --git a/rslib/src/storage/deck/mod.rs b/rslib/src/storage/deck/mod.rs index 7b1e08d58..d47d03894 100644 --- a/rslib/src/storage/deck/mod.rs +++ b/rslib/src/storage/deck/mod.rs @@ -33,7 +33,7 @@ fn row_to_deck(row: &Row) -> Result { common, kind: kind.kind.ok_or_else(|| { AnkiError::db_error( - format!("invalid deck kind: {}", id), + format!("invalid deck kind: {id}"), DbErrorKind::MissingEntity, ) })?, @@ -347,8 +347,8 @@ impl SqliteStorage { ))?; let top = current.name.as_native_str(); - let 
prefix_start = &format!("{}\x1f", top); - let prefix_end = &format!("{}\x20", top); + let prefix_start = &format!("{top}\x1f"); + let prefix_end = &format!("{top}\x20"); self.db .prepare_cached(include_str!("update_active.sql"))? @@ -379,7 +379,7 @@ impl SqliteStorage { let decks = self .get_schema11_decks() .map_err(|e| AnkiError::JsonError { - info: format!("decoding decks: {}", e), + info: format!("decoding decks: {e}"), })?; let mut names = HashSet::new(); for (_id, deck) in decks { diff --git a/rslib/src/storage/deckconfig/mod.rs b/rslib/src/storage/deckconfig/mod.rs index 2103e1512..5cc39cfc8 100644 --- a/rslib/src/storage/deckconfig/mod.rs +++ b/rslib/src/storage/deckconfig/mod.rs @@ -197,7 +197,7 @@ impl SqliteStorage { serde_json::from_value(conf) }) .map_err(|e| AnkiError::JsonError { - info: format!("decoding deck config: {}", e), + info: format!("decoding deck config: {e}"), }) })?; for (id, mut conf) in conf.into_iter() { diff --git a/rslib/src/storage/mod.rs b/rslib/src/storage/mod.rs index f240555eb..015f4fdc7 100644 --- a/rslib/src/storage/mod.rs +++ b/rslib/src/storage/mod.rs @@ -19,6 +19,7 @@ mod upgrades; use std::fmt::Write; +pub(crate) use sqlite::ProcessTextFlags; pub(crate) use sqlite::SqliteStorage; #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -52,7 +53,7 @@ where { let mut trailing_sep = false; for id in ids { - write!(buf, "{},", id).unwrap(); + write!(buf, "{id},").unwrap(); trailing_sep = true; } if trailing_sep { diff --git a/rslib/src/storage/notetype/mod.rs b/rslib/src/storage/notetype/mod.rs index 88c4074ac..692b68887 100644 --- a/rslib/src/storage/notetype/mod.rs +++ b/rslib/src/storage/notetype/mod.rs @@ -345,7 +345,7 @@ impl SqliteStorage { let nts = self .get_schema11_notetypes() .map_err(|e| AnkiError::JsonError { - info: format!("decoding models: {:?}", e), + info: format!("decoding models: {e:?}"), })?; let mut names = HashSet::new(); for (mut ntid, nt) in nts { diff --git a/rslib/src/storage/sqlite.rs b/rslib/src/storage/sqlite.rs index f882cee32..34e2a85d1 100644 --- a/rslib/src/storage/sqlite.rs +++ b/rslib/src/storage/sqlite.rs @@ -9,12 +9,14 @@ use std::hash::Hasher; use std::path::Path; use std::sync::Arc; +use bitflags::bitflags; use fnv::FnvHasher; use fsrs::FSRS; use fsrs::FSRS5_DEFAULT_DECAY; use regex::Regex; use rusqlite::functions::FunctionFlags; use rusqlite::params; +use rusqlite::trace::TraceEvent; use rusqlite::Connection; use serde_json::Value; use unicase::UniCase; @@ -23,6 +25,7 @@ use super::upgrades::SCHEMA_MAX_VERSION; use super::upgrades::SCHEMA_MIN_VERSION; use super::upgrades::SCHEMA_STARTING_VERSION; use super::SchemaVersion; +use crate::cloze::strip_clozes; use crate::config::schema11::schema11_config_as_string; use crate::error::DbErrorKind; use crate::prelude::*; @@ -30,6 +33,7 @@ use crate::scheduler::timing::local_minutes_west_for_stamp; use crate::scheduler::timing::v1_creation_date; use crate::storage::card::data::CardData; use crate::text::without_combining; +use crate::text::CowMapping; fn unicase_compare(s1: &str, s2: &str) -> Ordering { UniCase::new(s1).cmp(&UniCase::new(s2)) @@ -47,10 +51,13 @@ pub struct SqliteStorage { } fn open_or_create_collection_db(path: &Path) -> Result { - let mut db = Connection::open(path)?; + let db = Connection::open(path)?; if std::env::var("TRACESQL").is_ok() { - db.trace(Some(trace)); + db.trace_v2( + rusqlite::trace::TraceEventCodes::SQLITE_TRACE_STMT, + Some(trace), + ); } db.busy_timeout(std::time::Duration::from_secs(0))?; @@ -70,7 +77,7 @@ fn 
open_or_create_collection_db(path: &Path) -> Result { add_regexp_function(&db)?; add_regexp_fields_function(&db)?; add_regexp_tags_function(&db)?; - add_without_combining_function(&db)?; + add_process_text_function(&db)?; add_fnvhash_function(&db)?; add_extract_original_position_function(&db)?; add_extract_custom_data_function(&db)?; @@ -107,17 +114,28 @@ fn add_field_index_function(db: &Connection) -> rusqlite::Result<()> { ) } -fn add_without_combining_function(db: &Connection) -> rusqlite::Result<()> { +bitflags! { + pub(crate) struct ProcessTextFlags: u8 { + const NoCombining = 1; + const StripClozes = 1 << 1; + } +} + +fn add_process_text_function(db: &Connection) -> rusqlite::Result<()> { db.create_scalar_function( - "without_combining", - 1, + "process_text", + 2, FunctionFlags::SQLITE_DETERMINISTIC, |ctx| { - let text = ctx.get_raw(0).as_str()?; - Ok(match without_combining(text) { - Cow::Borrowed(_) => None, - Cow::Owned(o) => Some(o), - }) + let mut text = Cow::from(ctx.get_raw(0).as_str()?); + let opt = ProcessTextFlags::from_bits_truncate(ctx.get_raw(1).as_i64()? as u8); + if opt.contains(ProcessTextFlags::StripClozes) { + text = text.map_cow(strip_clozes); + } + if opt.contains(ProcessTextFlags::NoCombining) { + text = text.map_cow(without_combining); + } + Ok(text.get_owned()) }, ) } @@ -415,8 +433,10 @@ fn schema_version(db: &Connection) -> Result<(bool, u8)> { )) } -fn trace(s: &str) { - println!("sql: {}", s.trim().replace('\n', " ")); +fn trace(event: TraceEvent) { + if let TraceEvent::Stmt(_, sql) = event { + println!("sql: {}", sql.trim().replace('\n', " ")); + } } impl SqliteStorage { @@ -581,7 +601,7 @@ impl SqliteStorage { }) { Ok(corrupt) => corrupt, Err(e) => { - println!("error: {:?}", e); + println!("error: {e:?}"); true } } diff --git a/rslib/src/storage/sync.rs b/rslib/src/storage/sync.rs index 4bd0e5242..256566d68 100644 --- a/rslib/src/storage/sync.rs +++ b/rslib/src/storage/sync.rs @@ -54,7 +54,7 @@ impl SqliteStorage { if let Some(new_usn) = server_usn_if_client { let mut stmt = self .db - .prepare_cached(&format!("update {} set usn=? where id=?", table))?; + .prepare_cached(&format!("update {table} set usn=? where id=?"))?; for id in ids { stmt.execute(params![new_usn, id])?; } diff --git a/rslib/src/storage/sync_check.rs b/rslib/src/storage/sync_check.rs index 50e92f7d0..7693a5921 100644 --- a/rslib/src/storage/sync_check.rs +++ b/rslib/src/storage/sync_check.rs @@ -11,7 +11,7 @@ impl SqliteStorage { fn table_has_usn(&self, table: &str) -> Result { Ok(self .db - .prepare(&format!("select null from {} where usn=-1", table))? + .prepare(&format!("select null from {table} where usn=-1"))? .query([])? .next()? .is_some()) @@ -19,7 +19,7 @@ impl SqliteStorage { fn table_count(&self, table: &str) -> Result { self.db - .query_row(&format!("select count() from {}", table), [], |r| r.get(0)) + .query_row(&format!("select count() from {table}"), [], |r| r.get(0)) .map_err(Into::into) } @@ -36,7 +36,7 @@ impl SqliteStorage { ] { if self.table_has_usn(table)? 
{ return Err(AnkiError::sync_error( - format!("table had usn=-1: {}", table), + format!("table had usn=-1: {table}"), SyncErrorKind::Other, )); } diff --git a/rslib/src/sync/collection/tests.rs b/rslib/src/sync/collection/tests.rs index abf82262f..a7aa6cc8d 100644 --- a/rslib/src/sync/collection/tests.rs +++ b/rslib/src/sync/collection/tests.rs @@ -100,7 +100,7 @@ where _lock = LOCK.lock().await; endpoint } else { - format!("http://{}/", addr) + format!("http://{addr}/") }; let endpoint = Url::try_from(endpoint.as_str()).unwrap(); let auth = SyncAuth { @@ -734,7 +734,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> { for table in &["cards", "notes", "decks"] { assert_eq!( col1.storage - .db_scalar::(&format!("select count() from {}", table))?, + .db_scalar::(&format!("select count() from {table}"))?, 2 ); } @@ -754,7 +754,7 @@ async fn regular_sync(ctx: &SyncTestContext) -> Result<()> { for table in &["cards", "notes", "decks"] { assert_eq!( col2.storage - .db_scalar::(&format!("select count() from {}", table))?, + .db_scalar::(&format!("select count() from {table}"))?, 1 ); } diff --git a/rslib/src/sync/http_server/mod.rs b/rslib/src/sync/http_server/mod.rs index f96209df7..ddac26670 100644 --- a/rslib/src/sync/http_server/mod.rs +++ b/rslib/src/sync/http_server/mod.rs @@ -22,7 +22,7 @@ use anki_io::create_dir_all; use axum::extract::DefaultBodyLimit; use axum::routing::get; use axum::Router; -use axum_client_ip::SecureClientIpSource; +use axum_client_ip::ClientIpSource; use pbkdf2::password_hash::PasswordHash; use pbkdf2::password_hash::PasswordHasher; use pbkdf2::password_hash::PasswordVerifier; @@ -69,7 +69,7 @@ pub struct SyncServerConfig { #[serde(default = "default_base", rename = "base")] pub base_folder: PathBuf, #[serde(default = "default_ip_header")] - pub ip_header: SecureClientIpSource, + pub ip_header: ClientIpSource, } fn default_host() -> IpAddr { @@ -86,8 +86,8 @@ fn default_base() -> PathBuf { .join(".syncserver") } -pub fn default_ip_header() -> SecureClientIpSource { - SecureClientIpSource::ConnectInfo +pub fn default_ip_header() -> ClientIpSource { + ClientIpSource::ConnectInfo } impl SimpleServerInner { diff --git a/rslib/src/sync/http_server/routes.rs b/rslib/src/sync/http_server/routes.rs index dd4c0d3bd..072ad6140 100644 --- a/rslib/src/sync/http_server/routes.rs +++ b/rslib/src/sync/http_server/routes.rs @@ -53,7 +53,7 @@ async fn sync_handler( } pub fn collection_sync_router() -> Router

<P> { - Router::new().route("/:method", post(sync_handler::<P>)) + Router::new().route("/{method}", post(sync_handler::<P>)) } /// The Rust code used to send a GET with query params, which was inconsistent @@ -112,5 +112,5 @@ pub fn media_sync_router() -> Router<P> { "/begin", get(media_begin_get::<P>).post(media_begin_post::<P>), ) - .route("/:method", post(media_sync_handler::<P>)) + .route("/{method}", post(media_sync_handler::<P>

)) } diff --git a/rslib/src/sync/media/database/client/mod.rs b/rslib/src/sync/media/database/client/mod.rs index e58cb4fd9..f9c6e5ed1 100644 --- a/rslib/src/sync/media/database/client/mod.rs +++ b/rslib/src/sync/media/database/client/mod.rs @@ -283,15 +283,20 @@ fn row_to_name_and_checksum(row: &Row) -> error::Result<(String, Sha1Hash)> { Ok((file_name, sha1)) } -fn trace(s: &str) { - println!("sql: {}", s) +fn trace(event: rusqlite::trace::TraceEvent) { + if let rusqlite::trace::TraceEvent::Stmt(_, sql) = event { + println!("sql: {sql}"); + } } pub(crate) fn open_or_create>(path: P) -> error::Result { let mut db = Connection::open(path)?; if std::env::var("TRACESQL").is_ok() { - db.trace(Some(trace)); + db.trace_v2( + rusqlite::trace::TraceEventCodes::SQLITE_TRACE_STMT, + Some(trace), + ); } db.pragma_update(None, "page_size", 4096)?; diff --git a/rslib/src/sync/media/zip.rs b/rslib/src/sync/media/zip.rs index 0ad558d2d..d9b04f5e3 100644 --- a/rslib/src/sync/media/zip.rs +++ b/rslib/src/sync/media/zip.rs @@ -27,7 +27,8 @@ pub struct ZipFileMetadata { /// The metadata is in a different format to the upload case, since deletions /// don't need to be represented. pub fn zip_files_for_download(files: Vec<(String, Vec)>) -> Result> { - let options = FileOptions::default().compression_method(zip::CompressionMethod::Stored); + let options: FileOptions<'_, ()> = + FileOptions::default().compression_method(zip::CompressionMethod::Stored); let mut zip = ZipWriter::new(io::Cursor::new(vec![])); let mut entries = HashMap::new(); @@ -47,7 +48,8 @@ pub fn zip_files_for_download(files: Vec<(String, Vec)>) -> Result> } pub fn zip_files_for_upload(entries_: Vec<(String, Option>)>) -> Result> { - let options = FileOptions::default().compression_method(zip::CompressionMethod::Stored); + let options: FileOptions<'_, ()> = + FileOptions::default().compression_method(zip::CompressionMethod::Stored); let mut zip = ZipWriter::new(io::Cursor::new(vec![])); let mut entries = vec![]; diff --git a/rslib/src/sync/request/mod.rs b/rslib/src/sync/request/mod.rs index 4678cef9b..329aa44dd 100644 --- a/rslib/src/sync/request/mod.rs +++ b/rslib/src/sync/request/mod.rs @@ -10,14 +10,13 @@ use std::marker::PhantomData; use std::net::IpAddr; use std::sync::LazyLock; -use async_trait::async_trait; use axum::body::Body; use axum::extract::FromRequest; use axum::extract::Multipart; use axum::http::Request; use axum::http::StatusCode; use axum::RequestPartsExt; -use axum_client_ip::SecureClientIp; +use axum_client_ip::ClientIp; use axum_extra::TypedHeader; use header_and_stream::SyncHeader; use serde::de::DeserializeOwned; @@ -101,19 +100,18 @@ where } } -#[async_trait] -impl FromRequest for SyncRequest +impl FromRequest for SyncRequest where S: Send + Sync, T: DeserializeOwned, { type Rejection = HttpError; - async fn from_request(req: Request, state: &S) -> HttpResult { + async fn from_request(req: Request, state: &S) -> Result { let (mut parts, body) = req.into_parts(); let ip = parts - .extract::() + .extract::() .await .map_err(|_| { HttpError::new_without_source(StatusCode::INTERNAL_SERVER_ERROR, "missing ip") diff --git a/rslib/src/tags/findreplace.rs b/rslib/src/tags/findreplace.rs index 5db6e3ed2..b60b5ed88 100644 --- a/rslib/src/tags/findreplace.rs +++ b/rslib/src/tags/findreplace.rs @@ -35,7 +35,7 @@ impl Collection { }; if !match_case { - search = format!("(?i){}", search).into(); + search = format!("(?i){search}").into(); } self.transact(Op::UpdateTag, |col| { diff --git a/rslib/src/tags/matcher.rs 
b/rslib/src/tags/matcher.rs index b4961015e..d3c6ad88b 100644 --- a/rslib/src/tags/matcher.rs +++ b/rslib/src/tags/matcher.rs @@ -33,7 +33,7 @@ impl TagMatcher { (?:^|\ ) # 1: the tag prefix ( - {} + {tags} ) (?: # 2: an optional child separator @@ -41,8 +41,7 @@ impl TagMatcher { # or a space/end of string the end of the string |\ |$ ) - "#, - tags + "# ))?; Ok(Self { @@ -61,7 +60,7 @@ impl TagMatcher { let out = self.regex.replace(tag, |caps: &Captures| { // if we captured the child separator, add it to the replacement if caps.get(2).is_some() { - Cow::Owned(format!("{}::", replacement)) + Cow::Owned(format!("{replacement}::")) } else { Cow::Borrowed(replacement) } @@ -92,7 +91,7 @@ impl TagMatcher { let replacement = replacer(caps.get(1).unwrap().as_str()); // if we captured the child separator, add it to the replacement if caps.get(2).is_some() { - format!("{}::", replacement) + format!("{replacement}::") } else { replacement } diff --git a/rslib/src/tags/reparent.rs b/rslib/src/tags/reparent.rs index cbab806ff..4976b760e 100644 --- a/rslib/src/tags/reparent.rs +++ b/rslib/src/tags/reparent.rs @@ -109,7 +109,7 @@ fn reparented_name(existing_name: &str, new_parent: Option<&str>) -> Option baz::bar - let new_name = format!("{}::{}", new_parent, existing_base); + let new_name = format!("{new_parent}::{existing_base}"); if new_name != existing_name { Some(new_name) } else { diff --git a/rslib/src/template.rs b/rslib/src/template.rs index d09ade580..4895cc162 100644 --- a/rslib/src/template.rs +++ b/rslib/src/template.rs @@ -13,6 +13,7 @@ use nom::bytes::complete::tag; use nom::bytes::complete::take_until; use nom::combinator::map; use nom::sequence::delimited; +use nom::Parser; use regex::Regex; use crate::cloze::cloze_number_in_fields; @@ -67,7 +68,8 @@ impl TemplateMode { tag(self.end_tag()), ), |out| classify_handle(out), - )(s) + ) + .parse(s) } /// Return the next handlebar, comment or text token. @@ -127,7 +129,8 @@ fn comment_token(s: &str) -> nom::IResult<&str, Token> { tag(COMMENT_END), ), Token::Comment, - )(s) + ) + .parse(s) } fn tokens(mut template: &str) -> impl Iterator>> { @@ -262,10 +265,8 @@ fn template_error_to_anki_error( }; let details = htmlescape::encode_minimal(&localized_template_error(tr, err)); let more_info = tr.card_template_rendering_more_info(); - let source = format!( - "{}
<br>{}<br><a href='{}'>
{}", - header, details, TEMPLATE_ERROR_LINK, more_info - ); + let source = + format!("{header}
<br>{details}<br><a href='{TEMPLATE_ERROR_LINK}'>
{more_info}"); AnkiError::TemplateError { info: source } } @@ -276,32 +277,29 @@ fn localized_template_error(tr: &I18n, err: TemplateError) -> String { .card_template_rendering_no_closing_brackets("}}", tag) .into(), TemplateError::ConditionalNotClosed(tag) => tr - .card_template_rendering_conditional_not_closed(format!("{{{{/{}}}}}", tag)) + .card_template_rendering_conditional_not_closed(format!("{{{{/{tag}}}}}")) .into(), TemplateError::ConditionalNotOpen { closed, currently_open, } => if let Some(open) = currently_open { tr.card_template_rendering_wrong_conditional_closed( - format!("{{{{/{}}}}}", closed), - format!("{{{{/{}}}}}", open), + format!("{{{{/{closed}}}}}"), + format!("{{{{/{open}}}}}"), ) } else { tr.card_template_rendering_conditional_not_open( - format!("{{{{/{}}}}}", closed), - format!("{{{{#{}}}}}", closed), - format!("{{{{^{}}}}}", closed), + format!("{{{{/{closed}}}}}"), + format!("{{{{#{closed}}}}}"), + format!("{{{{^{closed}}}}}"), ) } .into(), TemplateError::FieldNotFound { field, filters } => tr - .card_template_rendering_no_such_field(format!("{{{{{}{}}}}}", filters, field), field) + .card_template_rendering_no_such_field(format!("{{{{{filters}{field}}}}}"), field) .into(), TemplateError::NoSuchConditional(condition) => tr - .card_template_rendering_no_such_field( - format!("{{{{{}}}}}", condition), - &condition[1..], - ) + .card_template_rendering_no_such_field(format!("{{{{{condition}}}}}"), &condition[1..]) .into(), } } @@ -520,10 +518,7 @@ impl RenderContext<'_> { Ok(false ^ negated) } else { let prefix = if negated { "^" } else { "#" }; - Err(TemplateError::NoSuchConditional(format!( - "{}{}", - prefix, key - ))) + Err(TemplateError::NoSuchConditional(format!("{prefix}{key}"))) } } } @@ -855,14 +850,14 @@ fn nodes_to_string(buf: &mut String, nodes: &[ParsedNode]) { .unwrap(); } ParsedNode::Conditional { key, children } => { - write!(buf, "{{{{#{}}}}}", key).unwrap(); + write!(buf, "{{{{#{key}}}}}").unwrap(); nodes_to_string(buf, children); - write!(buf, "{{{{/{}}}}}", key).unwrap(); + write!(buf, "{{{{/{key}}}}}").unwrap(); } ParsedNode::NegatedConditional { key, children } => { - write!(buf, "{{{{^{}}}}}", key).unwrap(); + write!(buf, "{{{{^{key}}}}}").unwrap(); nodes_to_string(buf, children); - write!(buf, "{{{{/{}}}}}", key).unwrap(); + write!(buf, "{{{{/{key}}}}}").unwrap(); } } } diff --git a/rslib/src/template_filters.rs b/rslib/src/template_filters.rs index cb3504fe3..4949e756d 100644 --- a/rslib/src/template_filters.rs +++ b/rslib/src/template_filters.rs @@ -165,15 +165,15 @@ fn furigana_filter(text: &str) -> Cow { /// convert to [[type:...]] for the gui code to process fn type_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:{}]]", field_name).into() + format!("[[type:{field_name}]]").into() } fn type_cloze_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:cloze:{}]]", field_name).into() + format!("[[type:cloze:{field_name}]]").into() } fn type_nc_filter<'a>(field_name: &str) -> Cow<'a, str> { - format!("[[type:nc:{}]]", field_name).into() + format!("[[type:nc:{field_name}]]").into() } fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> { @@ -191,18 +191,17 @@ fn hint_filter<'a>(text: &'a str, field_name: &str) -> Cow<'a, str> { r##" -{} -

-"##, - id, field_name, id, text +{field_name} + +"## ) .into() } fn tts_filter(options: &str, text: &str) -> String { - format!("[anki:tts lang={}]{}[/anki:tts]", options, text) + format!("[anki:tts lang={options}]{text}[/anki:tts]") } // Tests diff --git a/rslib/src/text.rs b/rslib/src/text.rs index f83332ff8..590c05b39 100644 --- a/rslib/src/text.rs +++ b/rslib/src/text.rs @@ -484,7 +484,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> { match s { r"\\" | r"\*" => s.to_string(), r"\_" => "_".to_string(), - "*" => format!("{}*", wildcard), + "*" => format!("{wildcard}*"), "_" => wildcard.to_string(), s => regex::escape(s), } diff --git a/rslib/sync/Cargo.toml b/rslib/sync/Cargo.toml index e2d960503..d23b4f380 100644 --- a/rslib/sync/Cargo.toml +++ b/rslib/sync/Cargo.toml @@ -13,4 +13,9 @@ path = "main.rs" name = "anki-sync-server" [dependencies] -anki.workspace = true + +[target.'cfg(windows)'.dependencies] +anki = { workspace = true, features = ["native-tls"] } + +[target.'cfg(not(windows))'.dependencies] +anki = { workspace = true, features = ["rustls"] } diff --git a/run b/run index fd65a70c7..3051345b1 100755 --- a/run +++ b/run @@ -8,6 +8,7 @@ export PYTHONPYCACHEPREFIX=out/pycache export ANKIDEV=${ANKIDEV-1} export QTWEBENGINE_REMOTE_DEBUGGING=${QTWEBENGINE_REMOTE_DEBUGGING-8080} export QTWEBENGINE_CHROMIUM_FLAGS=${QTWEBENGINE_CHROMIUM_FLAGS---remote-allow-origins=http://localhost:$QTWEBENGINE_REMOTE_DEBUGGING} +export PYENV=${PYENV-out/pyenv} # The pages can be accessed by, e.g. surfing to # http://localhost:40000/_anki/pages/deckconfig.html @@ -16,4 +17,4 @@ export ANKI_API_PORT=${ANKI_API_PORT-40000} export ANKI_API_HOST=${ANKI_API_HOST-127.0.0.1} ./ninja pylib qt -./out/pyenv/bin/python tools/run.py $* +${PYENV}/bin/python tools/run.py $* diff --git a/run.bat b/run.bat index 03acf0032..c689dda16 100755 --- a/run.bat +++ b/run.bat @@ -9,6 +9,6 @@ set QTWEBENGINE_CHROMIUM_FLAGS=--remote-allow-origins=http://localhost:8080 set ANKI_API_PORT=40000 set ANKI_API_HOST=127.0.0.1 -call tools\ninja pylib qt extract:win_amd64_audio || exit /b 1 +call tools\ninja pylib qt || exit /b 1 .\out\pyenv\scripts\python tools\run.py %* || exit /b 1 popd diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 608f0e356..8a21ec74e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] # older versions may fail to compile; newer versions may fail the clippy tests -channel = "1.85.0" +channel = "1.88.0" diff --git a/tools/build b/tools/build index 4fd78e195..4074ff98d 100755 --- a/tools/build +++ b/tools/build @@ -1,5 +1,8 @@ #!/bin/bash -set -e -RELEASE=1 ./ninja wheels +set -eo pipefail + +rm -rf out/wheels/* +RELEASE=2 ./ninja wheels +(cd qt/release && ./build.sh) echo "wheels are in out/wheels" diff --git a/tools/build-arm-lin b/tools/build-arm-lin new file mode 100755 index 000000000..67fb4e4bf --- /dev/null +++ b/tools/build-arm-lin @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e + +# sudo apt install libc6-dev-arm64-cross gcc-aarch64-linux-gnu +rustup target add aarch64-unknown-linux-gnu + +export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc +export LIN_ARM64=1 + +RELEASE=2 ./ninja wheels:anki +echo "wheels are in out/wheels" diff --git a/tools/build-x64-mac b/tools/build-x64-mac new file mode 100755 index 000000000..050b9c636 --- /dev/null +++ b/tools/build-x64-mac @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e + +rustup target add x86_64-apple-darwin + +export MAC_X86=1 + +RELEASE=2 ./ninja wheels:anki +echo "wheels 
are in out/wheels" diff --git a/tools/build.bat b/tools/build.bat index 364f0f0f8..3ace08110 100755 --- a/tools/build.bat +++ b/tools/build.bat @@ -1,5 +1,7 @@ @echo off pushd "%~dp0"\.. -set RELEASE=1 +if exist out\wheels rmdir /s /q out\wheels +set RELEASE=2 tools\ninja wheels || exit /b 1 +echo wheels are in out/wheels popd diff --git a/tools/mac-x86 b/tools/mac-x86 deleted file mode 100755 index 312e19320..000000000 --- a/tools/mac-x86 +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# -# Run a command with an alternative buildroot and Intel architecture target, for building Intel on an ARM Mac. -# Eg ./tools/mac-x86 ./tools/run-qt5.14 -# -# Uses hard-coded paths to Python and build folders. -# - -export PYTHON_BINARY=/usr/local/bin/python3.9-intel64 -export BUILD_ROOT=~/Local/build/anki-x86 -export NORMAL_BUILD_ROOT=~/Local/build/anki -export MAC_X86=1 - - -# run provided command -$* - -BUILD_ROOT=$NORMAL_BUILD_ROOT ./ninja just-to-restore-build-root-and-failure-is-expected diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index dfe9bef89..37e213570 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -21,16 +21,9 @@ use walkdir::WalkDir; const NONSTANDARD_HEADER: &[&str] = &[ "./pylib/anki/_vendor/stringcase.py", - "./pylib/anki/importing/pauker.py", - "./pylib/anki/importing/supermemo_xml.py", "./pylib/anki/statsbg.py", - "./pylib/tools/protoc-gen-mypy.py", - "./python/pyqt/install.py", - "./python/write_wheel.py", "./qt/aqt/mpv.py", "./qt/aqt/winpaths.py", - "./qt/bundle/build.rs", - "./qt/bundle/src/main.rs", ]; const IGNORED_FOLDERS: &[&str] = &[ @@ -38,7 +31,6 @@ const IGNORED_FOLDERS: &[&str] = &[ "./node_modules", "./qt/aqt/forms", "./tools/workspace-hack", - "./qt/bundle/PyOxidizer", "./target", ".mypy_cache", "./extra", @@ -116,7 +108,7 @@ impl LintContext { LazyCell::force(&self.unstaged_changes); fix_copyright(path)?; } else { - println!("missing standard copyright header: {:?}", path); + println!("missing standard copyright header: {path:?}"); self.found_problems = true; } } @@ -209,7 +201,7 @@ fn sveltekit_temp_file(path: &str) -> bool { } fn check_cargo_deny() -> Result<()> { - Command::run("cargo install cargo-deny@0.18.2")?; + Command::run("cargo install cargo-deny@0.18.3")?; Command::run("cargo deny check")?; Ok(()) } @@ -249,7 +241,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html .write(true) .open(path) .with_context(|| format!("opening {path}"))?; - write!(file, "{}{}", header, data).with_context(|| format!("writing {path}"))?; + write!(file, "{header}{data}").with_context(|| format!("writing {path}"))?; Ok(()) } diff --git a/tools/ninja.bat b/tools/ninja.bat index 7d939846e..6310103c3 100755 --- a/tools/ninja.bat +++ b/tools/ninja.bat @@ -1,5 +1,5 @@ @echo off -set CARGO_TARGET_DIR=%~dp0\..\out\rust +set CARGO_TARGET_DIR=%~dp0..\out\rust REM separate build+run steps so build env doesn't leak into subprocesses cargo build -p runner --release || exit /b 1 out\rust\release\runner build %* || exit /b 1 diff --git a/tools/publish b/tools/publish new file mode 100755 index 000000000..6214f27ed --- /dev/null +++ b/tools/publish @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e +shopt -s extglob + +#export UV_PUBLISH_TOKEN=$(pass show w/pypi-api-test) +#out/extracted/uv/uv publish --index testpypi out/wheels/* + +export UV_PUBLISH_TOKEN=$(pass show w/pypi-api) + +# Upload all wheels except anki_release*.whl first +out/extracted/uv/uv publish out/wheels/!(anki_release*).whl +# Then upload anki_release*.whl 
+out/extracted/uv/uv publish out/wheels/anki_release*.whl diff --git a/tools/reload_webviews.py b/tools/reload_webviews.py index 948401d9b..bb84c2554 100755 --- a/tools/reload_webviews.py +++ b/tools/reload_webviews.py @@ -43,11 +43,11 @@ except Exception as e: print_error( f"Could not establish connection to Chromium remote debugger. Is Anki Open? Exception:\n{e}" ) - exit(1) + sys.exit(1) if chrome.tabs is None: print_error("Was unable to get active web views.") - exit(1) + sys.exit(1) for tab_index, tab_data in enumerate(chrome.tabs): print(f"Reloading page: {tab_data['title']}") diff --git a/tools/run-qt5.14 b/tools/run-qt5.14 deleted file mode 100755 index bc7b539f4..000000000 --- a/tools/run-qt5.14 +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -set -e - -export PYTHONWARNINGS=default -export PYTHONPYCACHEPREFIX=out/pycache -export ANKIDEV=${ANKIDEV-1} -export QTWEBENGINE_REMOTE_DEBUGGING=${QTWEBENGINE_REMOTE_DEBUGGING-8080} -export QTWEBENGINE_CHROMIUM_FLAGS=${QTWEBENGINE_CHROMIUM_FLAGS---remote-allow-origins=http://localhost:$QTWEBENGINE_REMOTE_DEBUGGING} - -./ninja pylib qt pyenv-qt5.14 -./out/pyenv-qt5.14/bin/python tools/run.py $* diff --git a/tools/run-qt5.15 b/tools/run-qt5.15 deleted file mode 100755 index 67a9b6f32..000000000 --- a/tools/run-qt5.15 +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set -e - -export PYTHONWARNINGS=default -export PYTHONPYCACHEPREFIX=out/pycache -export ANKIDEV=${ANKIDEV-1} -export QTWEBENGINE_REMOTE_DEBUGGING=${QTWEBENGINE_REMOTE_DEBUGGING-8080} -export QTWEBENGINE_CHROMIUM_FLAGS=${QTWEBENGINE_CHROMIUM_FLAGS---remote-allow-origins=http://localhost:$QTWEBENGINE_REMOTE_DEBUGGING} -./ninja pylib qt pyenv-qt5.15 -./out/pyenv-qt5.15/bin/python tools/run.py $* diff --git a/tools/run-qt5.15.bat b/tools/run-qt5.15.bat deleted file mode 100755 index d17d5b534..000000000 --- a/tools/run-qt5.15.bat +++ /dev/null @@ -1,10 +0,0 @@ -@echo off -pushd "%~dp0\.." 
- -set PYTHONWARNINGS=default -set PYTHONPYCACHEPREFIX=out\pycache -set ANKIDEV=1 - -call tools\ninja pylib qt pyenv-qt5.15 || exit /b 1 -.\out\pyenv-qt5.15\scripts\python tools\run.py %* || exit /b 1 -popd diff --git a/tools/run-qt6.6 b/tools/run-qt6.6 new file mode 100755 index 000000000..dc0461229 --- /dev/null +++ b/tools/run-qt6.6 @@ -0,0 +1,9 @@ +#!/bin/bash + +set -e + +./ninja extract:uv + +export PYENV=./out/pyenv66 +UV_PROJECT_ENVIRONMENT=$PYENV ./out/extracted/uv/uv sync --all-packages --extra qt66 +./run $* diff --git a/tools/run-qt6.7 b/tools/run-qt6.7 new file mode 100755 index 000000000..d01d46cea --- /dev/null +++ b/tools/run-qt6.7 @@ -0,0 +1,9 @@ +#!/bin/bash + +set -e + +./ninja extract:uv + +export PYENV=./out/pyenv67 +UV_PROJECT_ENVIRONMENT=$PYENV ./out/extracted/uv/uv sync --all-packages --extra qt67 +./run $* diff --git a/tools/run-qt6.8 b/tools/run-qt6.8 deleted file mode 100755 index 9083f343e..000000000 --- a/tools/run-qt6.8 +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set -e - -export PYTHONWARNINGS=default -export PYTHONPYCACHEPREFIX=out/pycache -export ANKIDEV=${ANKIDEV-1} -export QTWEBENGINE_REMOTE_DEBUGGING=${QTWEBENGINE_REMOTE_DEBUGGING-8080} -export QTWEBENGINE_CHROMIUM_FLAGS=${QTWEBENGINE_CHROMIUM_FLAGS---remote-allow-origins=http://localhost:$QTWEBENGINE_REMOTE_DEBUGGING} -./ninja pylib qt pyenv-qt6.8 -./out/pyenv-qt6.8/bin/python tools/run.py $* diff --git a/tools/run-qt6.9 b/tools/run-qt6.9 new file mode 100755 index 000000000..6576b6c81 --- /dev/null +++ b/tools/run-qt6.9 @@ -0,0 +1,9 @@ +#!/bin/bash + +set -e + +./ninja extract:uv + +export PYENV=./out/pyenv69 +UV_PROJECT_ENVIRONMENT=$PYENV ./out/extracted/uv/uv sync --all-packages --extra qt69 +./run $* diff --git a/tools/update-launcher-env b/tools/update-launcher-env new file mode 100755 index 000000000..c84569f55 --- /dev/null +++ b/tools/update-launcher-env @@ -0,0 +1,15 @@ +#!/bin/bash +# +# Install our latest anki/aqt code into the launcher venv + +set -e + +rm -rf out/wheels +./ninja wheels +if [[ "$OSTYPE" == "darwin"* ]]; then + export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv +else + export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv +fi +./out/extracted/uv/uv pip install out/wheels/* + diff --git a/tools/update-launcher-env.bat b/tools/update-launcher-env.bat new file mode 100644 index 000000000..9b0b814c6 --- /dev/null +++ b/tools/update-launcher-env.bat @@ -0,0 +1,8 @@ +@echo off +rem +rem Install our latest anki/aqt code into the launcher venv + +rmdir /s /q out\wheels 2>nul +call tools\ninja wheels +set VIRTUAL_ENV=%LOCALAPPDATA%\AnkiProgramFiles\.venv +for %%f in (out\wheels\*.whl) do out\extracted\uv\uv pip install "%%f" \ No newline at end of file diff --git a/ts/editor/LabelName.svelte b/ts/editor/LabelName.svelte index 527acdbda..ea3c7e0b4 100644 --- a/ts/editor/LabelName.svelte +++ b/ts/editor/LabelName.svelte @@ -6,9 +6,3 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - - diff --git a/ts/lib/components/HelpSection.svelte b/ts/lib/components/HelpSection.svelte index 71b45fb6c..b52486865 100644 --- a/ts/lib/components/HelpSection.svelte +++ b/ts/lib/components/HelpSection.svelte @@ -8,6 +8,8 @@ import Row from "./Row.svelte"; import type { HelpItem } from "./types"; + import { mdiEarth } from "./icons"; + import Icon from "./Icon.svelte"; export let item: HelpItem; @@ -21,6 +23,11 @@ {/if} {#if item.help} + {#if item.global} +
<div class="icon"> + <Icon icon={mdiEarth} /> + </div>
+ {/if} {@html renderMarkdown(item.help)} {:else} {@html renderMarkdown( @@ -54,4 +61,12 @@ color: var(--fg-subtle); font-size: small; } + + .icon { + display: inline-block; + width: 1em; + fill: currentColor; + margin-right: 0.25em; + margin-bottom: 1.25em; + } diff --git a/ts/lib/components/types.ts b/ts/lib/components/types.ts index 9a2105d9e..2f94a2778 100644 --- a/ts/lib/components/types.ts +++ b/ts/lib/components/types.ts @@ -9,6 +9,7 @@ export type HelpItem = { help?: string; url?: string; sched?: HelpItemScheduler; + global?: boolean; }; export enum HelpItemScheduler { diff --git a/ts/lib/tag-editor/TagEditor.svelte b/ts/lib/tag-editor/TagEditor.svelte index cbbbf3f57..eb033ef7a 100644 --- a/ts/lib/tag-editor/TagEditor.svelte +++ b/ts/lib/tag-editor/TagEditor.svelte @@ -510,7 +510,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html border: 1px solid var(--border); border-radius: var(--border-radius); padding: 6px; - margin: 1px; + margin: 1px 3px 3px 1px; &:focus-within { outline-offset: -1px; diff --git a/ts/lib/tag-editor/TagInput.svelte b/ts/lib/tag-editor/TagInput.svelte index a8d76bcee..31d3b51f6 100644 --- a/ts/lib/tag-editor/TagInput.svelte +++ b/ts/lib/tag-editor/TagInput.svelte @@ -166,7 +166,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } function onKeydown(event: KeyboardEvent): void { - switch (event.code) { + switch (event.key) { case "Enter": onEnter(event); break; diff --git a/ts/lib/tslib/keys.ts b/ts/lib/tslib/keys.ts index 9bd6b42d8..58f571fac 100644 --- a/ts/lib/tslib/keys.ts +++ b/ts/lib/tslib/keys.ts @@ -90,7 +90,7 @@ export function keyToPlatformString(key: string): string { } export function isArrowLeft(event: KeyboardEvent): boolean { - if (event.code === "ArrowLeft") { + if (event.key === "ArrowLeft") { return true; } @@ -98,7 +98,7 @@ export function isArrowLeft(event: KeyboardEvent): boolean { } export function isArrowRight(event: KeyboardEvent): boolean { - if (event.code === "ArrowRight") { + if (event.key === "ArrowRight") { return true; } @@ -106,7 +106,7 @@ export function isArrowRight(event: KeyboardEvent): boolean { } export function isArrowUp(event: KeyboardEvent): boolean { - if (event.code === "ArrowUp") { + if (event.key === "ArrowUp") { return true; } @@ -114,7 +114,7 @@ export function isArrowUp(event: KeyboardEvent): boolean { } export function isArrowDown(event: KeyboardEvent): boolean { - if (event.code === "ArrowDown") { + if (event.key === "ArrowDown") { return true; } diff --git a/ts/licenses.json b/ts/licenses.json index 2e88336b3..412d1dae3 100644 --- a/ts/licenses.json +++ b/ts/licenses.json @@ -95,8 +95,8 @@ "repository": "https://github.com/TooTallNate/node-agent-base", "publisher": "Nathan Rajlich", "email": "nathan@tootallnate.net", - "path": "node_modules/http-proxy-agent/node_modules/agent-base", - "licenseFile": "node_modules/http-proxy-agent/node_modules/agent-base/README.md" + "path": "node_modules/https-proxy-agent/node_modules/agent-base", + "licenseFile": "node_modules/https-proxy-agent/node_modules/agent-base/README.md" }, "asynckit@0.4.0": { "licenses": "MIT", diff --git a/ts/routes/card-info/CardInfo.svelte b/ts/routes/card-info/CardInfo.svelte index 020037dda..938c7d92a 100644 --- a/ts/routes/card-info/CardInfo.svelte +++ b/ts/routes/card-info/CardInfo.svelte @@ -22,12 +22,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html $: decay = (() => { const paramsLength = stats?.fsrsParams?.length ?? 
0; if (paramsLength === 0) { - return 0.2; // default decay for FSRS-6 + return 0.1542; // default decay for FSRS-6 } if (paramsLength < 21) { return 0.5; // default decay for FSRS-4.5 and FSRS-5 } - return stats?.fsrsParams?.[20] ?? 0.2; + return stats?.fsrsParams?.[20] ?? 0.1542; })(); diff --git a/ts/routes/deck-options/AdvancedOptions.svelte b/ts/routes/deck-options/AdvancedOptions.svelte index 31c3f0d4c..fb892b7ec 100644 --- a/ts/routes/deck-options/AdvancedOptions.svelte +++ b/ts/routes/deck-options/AdvancedOptions.svelte @@ -82,6 +82,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: tr.deckConfigCustomScheduling(), help: tr.deckConfigCustomSchedulingTooltip(), url: "https://faqs.ankiweb.net/the-2021-scheduler.html#add-ons-and-custom-scheduling", + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/DailyLimits.svelte b/ts/routes/deck-options/DailyLimits.svelte index 9b156ca00..ea403c1f4 100644 --- a/ts/routes/deck-options/DailyLimits.svelte +++ b/ts/routes/deck-options/DailyLimits.svelte @@ -133,14 +133,15 @@ }, newCardsIgnoreReviewLimit: { title: tr.deckConfigNewCardsIgnoreReviewLimit(), - help: newCardsIgnoreReviewLimitHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, applyAllParentLimits: { title: tr.deckConfigApplyAllParentLimits(), help: applyAllParentLimitsHelp, url: HelpPage.DeckOptions.newCardsday, + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/FsrsOptions.svelte b/ts/routes/deck-options/FsrsOptions.svelte index f573a0278..706407889 100644 --- a/ts/routes/deck-options/FsrsOptions.svelte +++ b/ts/routes/deck-options/FsrsOptions.svelte @@ -10,7 +10,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import { SimulateFsrsReviewRequest } from "@generated/anki/scheduler_pb"; import { computeFsrsParams, - evaluateParams, + evaluateParamsLegacy, getRetentionWorkload, setWantsAbort, } from "@generated/backend"; @@ -31,6 +31,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html GetRetentionWorkloadRequest, UpdateDeckConfigsMode, } from "@generated/anki/deck_config_pb"; + import type Modal from "bootstrap/js/dist/modal"; export let state: DeckOptionsState; export let openHelpModal: (String) => void; @@ -243,10 +244,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html const search = $config.paramSearch ? $config.paramSearch : defaultparamSearch; - const resp = await evaluateParams({ + const resp = await evaluateParamsLegacy({ search, ignoreRevlogsBeforeMs: getIgnoreRevlogsBeforeMs(), - numOfRelearningSteps: $config.relearnSteps.length, + params: fsrsParams($config), }); if (computeParamsProgress) { computeParamsProgress.current = computeParamsProgress.total; @@ -296,7 +297,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html state.save(UpdateDeckConfigsMode.COMPUTE_ALL_PARAMS); } - let showSimulator = false; + let simulatorModal: Modal; - {#if false} - + {#if state.legacyEvaluate}
-
-
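// Note on the FsrsOptions.svelte hunks above: the boolean `showSimulator` flag is
// replaced by a Bootstrap `Modal` instance bound from SimulatorModal, so the dialog
// is now opened imperatively. A minimal sketch of the calling side, assuming a
// binding such as `bind:modal={simulatorModal}` (names illustrative, not verbatim
// from the patch):
//
//   import type Modal from "bootstrap/js/dist/modal";
//
//   let simulatorModal: Modal | null = null;
//
//   function openSimulator(): void {
//       // show()/hide() are the stock Bootstrap 5 Modal API
//       simulatorModal?.show();
//   }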
diff --git a/ts/routes/deck-options/FsrsOptionsOuter.svelte b/ts/routes/deck-options/FsrsOptionsOuter.svelte index 1f31e0bf9..fa543b5fc 100644 --- a/ts/routes/deck-options/FsrsOptionsOuter.svelte +++ b/ts/routes/deck-options/FsrsOptionsOuter.svelte @@ -35,10 +35,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: "FSRS", help: tr.deckConfigFsrsTooltip(), url: HelpPage.DeckOptions.fsrs, + global: true, }, desiredRetention: { title: tr.deckConfigDesiredRetention(), - help: tr.deckConfigDesiredRetentionTooltip(), + help: + tr.deckConfigDesiredRetentionTooltip() + + "\n\n" + + tr.deckConfigDesiredRetentionTooltip2(), sched: HelpItemScheduler.FSRS, }, modelParams: { @@ -53,6 +57,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html title: tr.deckConfigRescheduleCardsOnChange(), help: tr.deckConfigRescheduleCardsOnChangeTooltip(), sched: HelpItemScheduler.FSRS, + global: true, }, computeOptimalRetention: { title: tr.deckConfigComputeOptimalRetention(), @@ -62,10 +67,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html healthCheck: { title: tr.deckConfigHealthCheck(), help: + tr.deckConfigAffectsEntireCollection() + + "\n\n" + tr.deckConfigHealthCheckTooltip1() + "\n\n" + tr.deckConfigHealthCheckTooltip2(), sched: HelpItemScheduler.FSRS, + global: true, }, }; const helpSections: HelpItem[] = Object.values(settings); diff --git a/ts/routes/deck-options/SimulatorModal.svelte b/ts/routes/deck-options/SimulatorModal.svelte index 64b712560..546f840d6 100644 --- a/ts/routes/deck-options/SimulatorModal.svelte +++ b/ts/routes/deck-options/SimulatorModal.svelte @@ -33,8 +33,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import EasyDaysInput from "./EasyDaysInput.svelte"; import Warning from "./Warning.svelte"; import type { ComputeRetentionProgress } from "@generated/anki/collection_pb"; + import Modal from "bootstrap/js/dist/modal"; - export let shown = false; export let state: DeckOptionsState; export let simulateFsrsRequest: SimulateFsrsReviewRequest; export let computing: boolean; @@ -234,9 +234,21 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } $: easyDayPercentages = [...$config.easyDaysPercentages]; + + export let modal: Modal | null = null; + + function setupModal(node: Element) { + modal = new Modal(node); + return { + destroy() { + modal?.dispose(); + modal = null; + }, + }; + } -
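// Usage sketch for the `setupModal` action defined above (assumption: it is attached
// to the dialog's root element with Svelte's `use:` directive, which runs the action
// on mount and its returned destroy() on teardown, so Modal.dispose() is never missed;
// the markup is illustrative):
//
//   <div class="modal fade" tabindex="-1" use:setupModal>
//       <!-- dialog content -->
//   </div>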