Mirror of https://github.com/ankitects/anki.git, synced 2025-09-18 22:12:21 -04:00

Merge branch 'main' into patch-1
Commit 2cf6667967
426 changed files with 9366 additions and 9994 deletions
@@ -5,7 +5,8 @@ DESCRIPTORS_BIN = { value = "out/rslib/proto/descriptors.bin", relative = true }
 # build script will append .exe if necessary
 PROTOC = { value = "out/extracted/protoc/bin/protoc", relative = true }
 PYO3_NO_PYTHON = "1"
-MACOSX_DEPLOYMENT_TARGET = "10.13.4"
+MACOSX_DEPLOYMENT_TARGET = "11"
+PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows

 [term]
 color = "always"
@@ -5,9 +5,6 @@
 db-path = "~/.cargo/advisory-db"
 db-urls = ["https://github.com/rustsec/advisory-db"]
 ignore = [
-    # pyoxidizer is stuck on an old ring version
-    "RUSTSEC-2025-0009",
-    "RUSTSEC-2025-0010",
     # burn depends on an unmaintained package 'paste'
     "RUSTSEC-2024-0436",
 ]
@@ -17,12 +14,11 @@ allow = [
     "MIT",
     "Apache-2.0",
     "Apache-2.0 WITH LLVM-exception",
+    "CDLA-Permissive-2.0",
     "ISC",
     "MPL-2.0",
-    "Unicode-DFS-2016",
     "BSD-2-Clause",
     "BSD-3-Clause",
-    "OpenSSL",
     "CC0-1.0",
     "Unlicense",
     "Zlib",
@@ -20,7 +20,6 @@
     "ftl/usage",
     "licenses.json",
     ".dmypy.json",
-    "qt/bundle/PyOxidizer",
     "target",
     ".mypy_cache",
     "extra",
.gitignore (vendored): 2 lines changed

@@ -18,3 +18,5 @@ node_modules
 yarn-error.log
 ts/.svelte-kit
 .yarn
+.claude/settings.local.json
+.claude/user.md
.gitmodules (vendored): 6 lines changed

@@ -6,9 +6,3 @@
 path = ftl/qt-repo
 url = https://github.com/ankitects/anki-desktop-ftl.git
 shallow = true
-[submodule "qt/bundle/PyOxidizer"]
-path = qt/bundle/PyOxidizer
-url = https://github.com/ankitects/PyOxidizer.git
-shallow = true
-update = none
-
@@ -1,5 +0,0 @@
-[settings]
-py_version=39
-known_first_party=anki,aqt,tests
-profile=black
-extend_skip=qt/bundle
@@ -18,7 +18,7 @@ mypy_path =
 ftl,
 pylib/tools,
 python
-exclude = (qt/bundle/PyOxidizer|pylib/anki/_vendor)
+exclude = (pylib/anki/_vendor)

 [mypy-anki.*]
 disallow_untyped_defs = True
@@ -165,3 +165,5 @@ ignore_missing_imports = True
 ignore_missing_imports = True
 [mypy-pip_system_certs.*]
 ignore_missing_imports = True
+[mypy-anki_audio]
+ignore_missing_imports = True
.pylintrc: 48 lines changed (file deleted)

@@ -1,48 +0,0 @@
-[MASTER]
-ignore-patterns=.*_pb2.*
-persistent = no
-extension-pkg-whitelist=orjson,PyQt6
-init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])"
-
-[REPORTS]
-output-format=colorized
-
-[MESSAGES CONTROL]
-disable=
-  R,
-  line-too-long,
-  too-many-lines,
-  missing-function-docstring,
-  missing-module-docstring,
-  missing-class-docstring,
-  import-outside-toplevel,
-  wrong-import-position,
-  wrong-import-order,
-  fixme,
-  unused-wildcard-import,
-  attribute-defined-outside-init,
-  redefined-builtin,
-  wildcard-import,
-  broad-except,
-  bare-except,
-  unused-argument,
-  unused-variable,
-  redefined-outer-name,
-  global-statement,
-  protected-access,
-  arguments-differ,
-  arguments-renamed,
-  consider-using-f-string,
-  invalid-name,
-  broad-exception-raised
-
-[BASIC]
-good-names =
-  id,
-  tr,
-  db,
-  ok,
-  ip,
-
-[IMPORTS]
-ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2
.python-version (new file): 1 line changed

@@ -0,0 +1 @@
+3.13.5
.ruff.toml: 93 lines changed

@@ -1,2 +1,91 @@
-target-version = "py39"
-extend-exclude = ["qt/bundle"]
+lint.select = [
+  "E", # pycodestyle errors
+  "F", # Pyflakes errors
+  "PL", # Pylint rules
+  "I", # Isort rules
+  "ARG",
+  # "UP", # pyupgrade
+  # "B", # flake8-bugbear
+  # "SIM", # flake8-simplify
+]
+
+extend-exclude = ["*_pb2.py", "*_pb2.pyi"]
+
+lint.ignore = [
+  # Docstring rules (missing-*-docstring in pylint)
+  "D100", # Missing docstring in public module
+  "D101", # Missing docstring in public class
+  "D103", # Missing docstring in public function
+
+  # Import rules (wrong-import-* in pylint)
+  "E402", # Module level import not at top of file
+  "E501", # Line too long
+
+  # pycodestyle rules
+  "E741", # ambiguous-variable-name
+
+  # Comment rules (fixme in pylint)
+  "FIX002", # Line contains TODO
+
+  # Pyflakes rules
+  "F402", # import-shadowed-by-loop-var
+  "F403", # undefined-local-with-import-star
+  "F405", # undefined-local-with-import-star-usage
+
+  # Naming rules (invalid-name in pylint)
+  "N801", # Class name should use CapWords convention
+  "N802", # Function name should be lowercase
+  "N803", # Argument name should be lowercase
+  "N806", # Variable in function should be lowercase
+  "N811", # Constant imported as non-constant
+  "N812", # Lowercase imported as non-lowercase
+  "N813", # Camelcase imported as lowercase
+  "N814", # Camelcase imported as constant
+  "N815", # Variable in class scope should not be mixedCase
+  "N816", # Variable in global scope should not be mixedCase
+  "N817", # CamelCase imported as acronym
+  "N818", # Error suffix in exception names
+
+  # Pylint rules
+  "PLW0603", # global-statement
+  "PLW2901", # redefined-loop-name
+  "PLC0415", # import-outside-top-level
+  "PLR2004", # magic-value-comparison
+
+  # Exception handling (broad-except, bare-except in pylint)
+  "BLE001", # Do not catch blind exception
+
+  # Argument rules (unused-argument in pylint)
+  "ARG001", # Unused function argument
+  "ARG002", # Unused method argument
+  "ARG005", # Unused lambda argument
+
+  # Access rules (protected-access in pylint)
+  "SLF001", # Private member accessed
+
+  # String formatting (consider-using-f-string in pylint)
+  "UP032", # Use f-string instead of format call
+
+  # Exception rules (broad-exception-raised in pylint)
+  "TRY301", # Abstract raise to an inner function
+
+  # Builtin shadowing (redefined-builtin in pylint)
+  "A001", # Variable shadows a Python builtin
+  "A002", # Argument shadows a Python builtin
+  "A003", # Class attribute shadows a Python builtin
+]
+
+[lint.per-file-ignores]
+"**/anki/*_pb2.py" = ["ALL"]
+
+[lint.pep8-naming]
+ignore-names = ["id", "tr", "db", "ok", "ip"]
+
+[lint.pylint]
+max-args = 12
+max-returns = 10
+max-branches = 35
+max-statements = 125
+
+[lint.isort]
+known-first-party = ["anki", "aqt", "tests"]
.version: 2 lines changed

@@ -1 +1 @@
-25.06
+25.07.1
@@ -2,7 +2,7 @@
   "recommendations": [
     "dprint.dprint",
     "ms-python.python",
-    "ms-python.black-formatter",
+    "charliermarsh.ruff",
     "rust-lang.rust-analyzer",
     "svelte.svelte-vscode",
     "zxh404.vscode-proto3",
@@ -18,7 +18,7 @@
     "out/qt",
     "qt"
   ],
-  "python.formatting.provider": "black",
+  "python.formatting.provider": "charliermarsh.ruff",
  "python.linting.mypyEnabled": false,
  "python.analysis.diagnosticSeverityOverrides": {
    "reportMissingModuleSource": "none"
@@ -31,11 +31,13 @@
   "rust-analyzer.rustfmt.extraArgs": ["+nightly"],
   "search.exclude": {
     "**/node_modules": true,
-    ".bazel/**": true,
-    "qt/bundle/PyOxidizer": true
+    ".bazel/**": true
   },
   "rust-analyzer.cargo.buildScripts.enable": true,
   "python.analysis.typeCheckingMode": "off",
+  "python.analysis.exclude": [
+    "out/launcher/**"
+  ],
   "terminal.integrated.env.windows": {
     "PATH": "c:\\msys64\\usr\\bin;${env:Path}"
   }
CLAUDE.md (new file): 86 lines changed

@@ -0,0 +1,86 @@
+# Claude Code Configuration
+
+## Project Overview
+
+Anki is a spaced repetition flashcard program with a multi-layered architecture. Main components:
+
+- Web frontend: Svelte/TypeScript in ts/
+- PyQt GUI, which embeds the web components, in aqt/
+- Python library which wraps our Rust layer (pylib/, with Rust module in pylib/rsbridge)
+- Core Rust layer in rslib/
+- Protobuf definitions in proto/ that are used by the different layers to
+  talk to each other.
+
+## Building/checking
+
+./check (check.bat) will format the code and run the main build & checks.
+Please do this as a final step before marking a task as completed.
+
+## Quick iteration
+
+During development, you can build/check subsections of our code:
+
+- Rust: 'cargo check'
+- Python: './tools/dmypy', and if wheel-related, './ninja wheels'
+- TypeScript/Svelte: './ninja check:svelte'
+
+Be mindful that some changes (such as modifications to .proto files) may
+need a full build with './check' first.
+
+## Build tooling
+
+'./check' and './ninja' invoke our build system, which is implemented in
+build/. It takes care of downloading required deps and invoking our build
+steps.
+
+## Translations
+
+ftl/ contains our Fluent translation files. We have scripts in rslib/i18n
+to auto-generate an API for Rust, TypeScript and Python so that our code can
+access the translations in a type-safe manner. Changes should be made to
+ftl/core or ftl/qt. Except for features specific to our Qt interface, prefer
+the core module. When adding new strings, confirm the appropriate ftl file
+first, and try to match the existing style.
+
+## Protobuf and IPC
+
+Our build scripts use the .proto files to define our Rust library's
+non-Rust API. pylib/rsbridge exposes that API, and _backend.py exposes
+snake_case methods for each protobuf RPC that call into the API.
+Similar tooling creates a @generated/backend TypeScript module for
+communicating with the Rust backend (which happens over POST requests).
+
+## Fixing errors
+
+When dealing with build errors or failing tests, invoke 'check' or one
+of the quick iteration commands regularly. This helps verify your changes
+are correct. To locate other instances of a problem, run the check again;
+don't attempt to grep the codebase.
+
+## Ignores
+
+The files in out/ are auto-generated. Mostly you should ignore that folder,
+though you may sometimes find it useful to view
+out/{pylib/anki,qt/_aqt,ts/lib/generated} when dealing with cross-language
+communication or our other generated source code.
+
+## Launcher/installer
+
+The code for our launcher is in qt/launcher, with separate code for each
+platform.
+
+## Rust dependencies
+
+Prefer adding to the root workspace, and using dep.workspace = true in the
+individual Rust project.
+
+## Rust utilities
+
+rslib/{process,io} contain some helpers for file and process operations,
+which provide better error messages/context and some ergonomics. Use them
+when possible.
+
+## Rust error handling
+
+In rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust
+modules, prefer anyhow + additional context where appropriate. Unwrapping
+in build scripts/tests is fine.
+
+## Individual preferences
+
+See @.claude/user.md
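The "Rust error handling" section above splits conventions by crate: snafu-based AnkiError inside rslib, anyhow with added context elsewhere. As a minimal sketch of the anyhow side (illustrative only; read_manifest and its path argument are hypothetical, not from this commit):

use anyhow::{Context, Result};

fn read_manifest(path: &str) -> Result<String> {
    // Attach human-readable context so a failure reports which file was involved.
    std::fs::read_to_string(path).with_context(|| format!("reading {path}"))
}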
@@ -63,6 +63,7 @@ Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>
 Akshara Balachandra <akshara.bala.18@gmail.com>
 lukkea <github.com/lukkea/>
 David Allison <davidallisongithub@gmail.com>
+David Allison <62114487+david-allison@users.noreply.github.com>
 Tsung-Han Yu <johan456789@gmail.com>
 Piotr Kubowicz <piotr.kubowicz@gmail.com>
 RumovZ <gp5glkw78@relay.firefox.com>
@@ -232,6 +233,7 @@ Spiritual Father <https://github.com/spiritualfather>
 Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
 Sunong2008 <https://github.com/Sunrongguo2008>
 Marvin Kopf <marvinkopf@outlook.com>
+Kevin Nakamura <grinkers@grinkers.net>
 ********************

 The text of the 3 clause BSD license follows:
Cargo.lock (generated): 1966 lines changed. File diff suppressed because it is too large.
Cargo.toml: 135 lines changed

@@ -12,8 +12,7 @@ members = [
   "build/runner",
   "ftl",
   "pylib/rsbridge",
-  "qt/bundle/mac",
-  "qt/bundle/win",
+  "qt/launcher",
   "rslib",
   "rslib/i18n",
   "rslib/io",
@@ -23,7 +22,6 @@ members = [
   "rslib/sync",
   "tools/minilints",
 ]
-exclude = ["qt/bundle"]
 resolver = "2"

 [workspace.dependencies.percent-encoding-iri]
@@ -35,7 +33,7 @@ git = "https://github.com/ankitects/linkcheck.git"
 rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

 [workspace.dependencies.fsrs]
-version = "4.0.0"
+version = "4.1.1"
 # git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
 # rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
 # path = "../open-spaced-repetition/fsrs-rs"
@@ -54,99 +52,100 @@ ninja_gen = { "path" = "build/ninja_gen" }
 unicase = "=2.6.0" # any changes could invalidate sqlite indexes

 # normal
-ammonia = "4.0.0"
+ammonia = "4.1.0"
-anyhow = "1.0.90"
+anyhow = "1.0.98"
-apple-bundles = "0.17.0"
-async-compression = { version = "0.4.17", features = ["zstd", "tokio"] }
+async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
 async-stream = "0.3.6"
-async-trait = "0.1.83"
+async-trait = "0.1.88"
-axum = { version = "0.7", features = ["multipart", "macros"] }
+axum = { version = "0.8.4", features = ["multipart", "macros"] }
-axum-client-ip = "0.6"
+axum-client-ip = "1.1.3"
-axum-extra = { version = "0.9.4", features = ["typed-header"] }
+axum-extra = { version = "0.10.1", features = ["typed-header"] }
+bitflags = "2.9.1"
-blake3 = "1.5.4"
+blake3 = "1.8.2"
-bytes = "1.7.2"
+bytes = "1.10.1"
-camino = "1.1.9"
+camino = "1.1.10"
-chrono = { version = "0.4.38", default-features = false, features = ["std", "clock"] }
+chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] }
-clap = { version = "4.5.20", features = ["derive"] }
+clap = { version = "4.5.40", features = ["derive"] }
-coarsetime = "0.1.34"
+coarsetime = "0.1.36"
-convert_case = "0.6.0"
+convert_case = "0.8.0"
-criterion = { version = "0.5.1" }
+criterion = { version = "0.6.0" }
-csv = "1.3.0"
+csv = "1.3.1"
-data-encoding = "2.6.0"
+data-encoding = "2.9.0"
 difflib = "0.4.0"
-dirs = "5.0.1"
+dirs = "6.0.0"
 dunce = "1.0.5"
+embed-resource = "3.0.4"
 envy = "0.4.2"
-flate2 = "1.0.34"
+flate2 = "1.1.2"
-fluent = "0.16.1"
+fluent = "0.17.0"
-fluent-bundle = "0.15.3"
+fluent-bundle = "0.16.0"
-fluent-syntax = "0.11.1"
+fluent-syntax = "0.12.0"
 fnv = "1.0.7"
 futures = "0.3.31"
-glob = "0.3.1"
-globset = "0.4.15"
+globset = "0.4.16"
 hex = "0.4.3"
 htmlescape = "0.3.1"
 hyper = "1"
 id_tree = "1.8.0"
 inflections = "1.1.1"
-intl-memoizer = "0.5.2"
+intl-memoizer = "0.5.3"
-itertools = "0.13.0"
+itertools = "0.14.0"
 junction = "1.2.0"
-lazy_static = "1.5.0"
+libc = "0.2"
+libc-stdhandle = "0.1"
 maplit = "1.0.2"
-nom = "7.1.3"
+nom = "8.0.0"
 num-format = "0.4.4"
-num_cpus = "1.16.0"
+num_cpus = "1.17.0"
 num_enum = "0.7.3"
-once_cell = "1.20.2"
+once_cell = "1.21.3"
 pbkdf2 = { version = "0.12", features = ["simple"] }
-phf = { version = "0.11.2", features = ["macros"] }
+phf = { version = "0.11.3", features = ["macros"] }
-pin-project = "1.1.6"
+pin-project = "1.1.10"
-plist = "1.7.0"
-prettyplease = "0.2.24"
+prettyplease = "0.2.34"
 prost = "0.13"
 prost-build = "0.13"
-prost-reflect = "0.14"
+prost-reflect = "0.14.7"
 prost-types = "0.13"
-pulldown-cmark = "0.9.6"
+pulldown-cmark = "0.13.0"
-pyo3 = { version = "0.24", features = ["extension-module", "abi3", "abi3-py39"] }
+pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
-rand = "0.8.5"
+rand = "0.9.1"
-regex = "1.11.0"
+regex = "1.11.1"
-reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] }
+reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
-rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] }
+rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
 rustls-pemfile = "2.2.0"
 scopeguard = "1.2.0"
-serde = { version = "1.0.210", features = ["derive"] }
+serde = { version = "1.0.219", features = ["derive"] }
-serde-aux = "4.5.0"
+serde-aux = "4.7.0"
-serde_json = "1.0.132"
+serde_json = "1.0.140"
-serde_repr = "0.1.19"
+serde_repr = "0.1.20"
-serde_tuple = "0.5.0"
+serde_tuple = "1.1.0"
 sha1 = "0.10.6"
-sha2 = { version = "0.10.8" }
+sha2 = { version = "0.10.9" }
-simple-file-manifest = "0.11.0"
 snafu = { version = "0.8.6", features = ["rust_1_61"] }
-strum = { version = "0.26.3", features = ["derive"] }
+strum = { version = "0.27.1", features = ["derive"] }
-syn = { version = "2.0.82", features = ["parsing", "printing"] }
+syn = { version = "2.0.103", features = ["parsing", "printing"] }
-tar = "0.4.42"
+tar = "0.4.44"
-tempfile = "3.13.0"
+tempfile = "3.20.0"
 termcolor = "1.4.1"
-tokio = { version = "1.40", features = ["fs", "rt-multi-thread", "macros", "signal"] }
+tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] }
-tokio-util = { version = "0.7.12", features = ["io"] }
+tokio-util = { version = "0.7.15", features = ["io"] }
-tower-http = { version = "0.5", features = ["trace"] }
+tower-http = { version = "0.6.6", features = ["trace"] }
-tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
+tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
 tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
+tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
-tugger-windows-codesign = "0.10.0"
-unic-langid = { version = "0.9.5", features = ["macros"] }
+unic-langid = { version = "0.9.6", features = ["macros"] }
 unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"
 walkdir = "2.5.0"
-which = "5.0.0"
+which = "8.0.0"
-wiremock = "0.6.2"
+widestring = "1.1.0"
+winapi = { version = "0.3", features = ["wincon", "winreg"] }
+windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] }
+wiremock = "0.6.3"
 xz2 = "0.1.7"
-zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] }
+zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
-zstd = { version = "0.13.2", features = ["zstdmt"] }
+zstd = { version = "0.13.3", features = ["zstdmt"] }

 # Apply mild optimizations to our dependencies in dev mode, which among other things
 # improves sha2 performance by about 21x. Opt 1 chosen due to
LICENSE: 2 lines changed

@@ -6,8 +6,6 @@ The following included source code items use a license other than AGPL3:

 In the pylib folder:

-* The SuperMemo importer: GPL3 and 0BSD.
-* The Pauker importer: BSD-3.
 * statsbg.py: CC BY 4.0.

 In the qt folder:
@@ -1,4 +1,4 @@
-# Anki
+# Anki®

 [](https://buildkite.com/ankitects/anki-ci)
@@ -27,7 +27,6 @@ pub fn build_and_check_aqt(build: &mut Build) -> Result<()> {
     build_forms(build)?;
     build_generated_sources(build)?;
     build_data_folder(build)?;
-    build_macos_helper(build)?;
     build_wheel(build)?;
     check_python(build)?;
     Ok(())
@@ -39,7 +38,6 @@ fn build_forms(build: &mut Build) -> Result<()> {
     let mut py_files = vec![];
     for path in ui_files.resolve() {
         let outpath = outdir.join(path.file_name().unwrap()).into_string();
-        py_files.push(outpath.replace(".ui", "_qt5.py"));
         py_files.push(outpath.replace(".ui", "_qt6.py"));
     }
     build.add_action(
@@ -337,47 +335,25 @@ impl BuildAction for BuildThemedIcon<'_> {
     }
 }

-fn build_macos_helper(build: &mut Build) -> Result<()> {
-    if cfg!(target_os = "macos") {
-        build.add_action(
-            "qt:aqt:data:lib:libankihelper",
-            RunCommand {
-                command: ":pyenv:bin",
-                args: "$script $out $in",
-                inputs: hashmap! {
-                    "script" => inputs!["qt/mac/helper_build.py"],
-                    "in" => inputs![glob!["qt/mac/*.swift"]],
-                    "" => inputs!["out/env"],
-                },
-                outputs: hashmap! {
-                    "out" => vec!["qt/_aqt/data/lib/libankihelper.dylib"],
-                },
-            },
-        )?;
-    }
-    Ok(())
-}
-
 fn build_wheel(build: &mut Build) -> Result<()> {
     build.add_action(
         "wheels:aqt",
         BuildWheel {
             name: "aqt",
             version: anki_version(),
-            src_folder: "qt/aqt",
-            gen_folder: "$builddir/qt/_aqt",
             platform: None,
-            deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "python/requirements.aqt.in"],
+            deps: inputs![
+                ":qt:aqt",
+                glob!("qt/aqt/**"),
+                "qt/pyproject.toml",
+                "qt/hatch_build.py"
+            ],
         },
     )
 }

 fn check_python(build: &mut Build) -> Result<()> {
-    python_format(
-        build,
-        "qt",
-        inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")],
-    )?;
+    python_format(build, "qt", inputs![glob!("qt/**/*.py")])?;

     build.add_action(
         "check:pytest:aqt",
@@ -1,442 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-use std::env;
-
-use anyhow::Result;
-use ninja_gen::action::BuildAction;
-use ninja_gen::archives::download_and_extract;
-use ninja_gen::archives::empty_manifest;
-use ninja_gen::archives::with_exe;
-use ninja_gen::archives::OnlineArchive;
-use ninja_gen::archives::Platform;
-use ninja_gen::build::BuildProfile;
-use ninja_gen::cargo::CargoBuild;
-use ninja_gen::cargo::RustOutput;
-use ninja_gen::git::SyncSubmodule;
-use ninja_gen::glob;
-use ninja_gen::input::BuildInput;
-use ninja_gen::inputs;
-use ninja_gen::python::PythonEnvironment;
-use ninja_gen::Build;
-use ninja_gen::Utf8Path;
-
-use crate::anki_version;
-use crate::platform::overriden_python_target_platform;
-use crate::platform::overriden_rust_target_triple;
-
-#[derive(Debug, PartialEq, Eq)]
-enum DistKind {
-    Standard,
-}
-
-impl DistKind {
-    fn folder_name(&self) -> &'static str {
-        match self {
-            DistKind::Standard => "std",
-        }
-    }
-
-    fn name(&self) -> &'static str {
-        match self {
-            DistKind::Standard => "standard",
-        }
-    }
-}
-
-pub fn build_bundle(build: &mut Build) -> Result<()> {
-    // install into venv
-    setup_primary_venv(build)?;
-    install_anki_wheels(build)?;
-
-    // bundle venv into output binary + extra_files
-    build_pyoxidizer(build)?;
-    build_artifacts(build)?;
-    build_binary(build)?;
-
-    // package up outputs with Qt/other deps
-    download_dist_folder_deps(build)?;
-    build_dist_folder(build, DistKind::Standard)?;
-
-    build_packages(build)?;
-
-    Ok(())
-}
-
-fn targetting_macos_arm() -> bool {
-    cfg!(all(target_os = "macos", target_arch = "aarch64"))
-        && overriden_python_target_platform().is_none()
-}
-
-const WIN_AUDIO: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz",
-    sha256: "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
-};
-
-const MAC_ARM_AUDIO: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-arm64.tar.gz",
-    sha256: "f6c4af9be59ae1c82a16f5c6307f13cbf31b49ad7b69ce1cb6e0e7b403cfdb8f",
-};
-
-const MAC_AMD_AUDIO: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-amd64.tar.gz",
-    sha256: "ecbb3c878805cdd58b1a0b8e3fd8c753b8ce3ad36c8b5904a79111f9db29ff42",
-};
-
-const MAC_ARM_QT6: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-arm64.tar.zst",
-    sha256: "9b2ade4ae9b80506689062845e83e8c60f7fa9843545bf7bb2d11d3e2f105878",
-};
-
-const MAC_AMD_QT6: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-amd64.tar.zst",
-    sha256: "dbd0871e4da22820d1fa9ab29220d631467d1178038dcab4b15169ad7f499b1b",
-};
-
-const LINUX_QT_PLUGINS: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-02/qt-plugins-linux-amd64.tar.gz",
-    sha256: "66bb568aca7242bc55ad419bf5c96755ca15d2a743e1c3a09cba8b83230b138b",
-};
-
-const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
-    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
-    sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
-};
-
-fn download_dist_folder_deps(build: &mut Build) -> Result<()> {
-    let mut bundle_deps = vec![":wheels"];
-    if cfg!(windows) {
-        download_and_extract(build, "win_amd64_audio", WIN_AUDIO, empty_manifest())?;
-        download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
-        bundle_deps.extend([":extract:win_amd64_audio", ":extract:nsis_plugins"]);
-    } else if cfg!(target_os = "macos") {
-        if targetting_macos_arm() {
-            download_and_extract(build, "mac_arm_audio", MAC_ARM_AUDIO, empty_manifest())?;
-            download_and_extract(build, "mac_arm_qt6", MAC_ARM_QT6, empty_manifest())?;
-            bundle_deps.extend([":extract:mac_arm_audio", ":extract:mac_arm_qt6"]);
-        } else {
-            download_and_extract(build, "mac_amd_audio", MAC_AMD_AUDIO, empty_manifest())?;
-            download_and_extract(build, "mac_amd_qt6", MAC_AMD_QT6, empty_manifest())?;
-            bundle_deps.extend([":extract:mac_amd_audio", ":extract:mac_amd_qt6"]);
-        }
-    } else {
-        download_and_extract(
-            build,
-            "linux_qt_plugins",
-            LINUX_QT_PLUGINS,
-            empty_manifest(),
-        )?;
-        bundle_deps.extend([":extract:linux_qt_plugins"]);
-    }
-    build.add_dependency(
-        "bundle:deps",
-        inputs![bundle_deps
-            .iter()
-            .map(ToString::to_string)
-            .collect::<Vec<_>>()],
-    );
-    Ok(())
-}
-
-struct Venv {
-    label: &'static str,
-    path_without_builddir: &'static str,
-}
-
-impl Venv {
-    fn label_as_target(&self, suffix: &str) -> String {
-        format!(":{}{suffix}", self.label)
-    }
-}
-
-const PRIMARY_VENV: Venv = Venv {
-    label: "bundle:pyenv",
-    path_without_builddir: "bundle/pyenv",
-};
-
-fn setup_primary_venv(build: &mut Build) -> Result<()> {
-    let mut qt6_reqs = inputs![
-        "python/requirements.bundle.txt",
-        "python/requirements.qt6_6.txt",
-    ];
-    if cfg!(windows) {
-        qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"];
-    }
-    build.add_action(
-        PRIMARY_VENV.label,
-        PythonEnvironment {
-            folder: PRIMARY_VENV.path_without_builddir,
-            base_requirements_txt: "python/requirements.base.txt".into(),
-            requirements_txt: qt6_reqs,
-            extra_binary_exports: &[],
-        },
-    )?;
-    Ok(())
-}
-
-struct InstallAnkiWheels {
-    venv: Venv,
-}
-
-impl BuildAction for InstallAnkiWheels {
-    fn command(&self) -> &str {
-        "$pip install --force-reinstall --no-deps $in"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        build.add_inputs("pip", inputs![self.venv.label_as_target(":pip")]);
-        build.add_inputs("in", inputs![":wheels"]);
-        build.add_output_stamp("bundle/wheels.stamp");
-    }
-}
-
-fn install_anki_wheels(build: &mut Build) -> Result<()> {
-    build.add_action(
-        "bundle:add_wheels:qt6",
-        InstallAnkiWheels { venv: PRIMARY_VENV },
-    )?;
-    Ok(())
-}
-
-fn build_pyoxidizer(build: &mut Build) -> Result<()> {
-    let offline_build = env::var("OFFLINE_BUILD").is_ok();
-
-    build.add_action(
-        "bundle:pyoxidizer:repo",
-        SyncSubmodule {
-            path: "qt/bundle/PyOxidizer",
-            offline_build,
-        },
-    )?;
-    let target =
-        overriden_rust_target_triple().unwrap_or_else(|| Platform::current().as_rust_triple());
-    let output_bin = format!("bundle/rust/{target}/release/pyoxidizer",);
-    build.add_action(
-        "bundle:pyoxidizer:bin",
-        CargoBuild {
-            inputs: inputs![
-                ":bundle:pyoxidizer:repo",
-                "out/env",
-                glob!["qt/bundle/PyOxidizer/**"]
-            ],
-            // can't use ::Binary() here, as we're in a separate workspace
-            outputs: &[RustOutput::Data("bin", &with_exe(&output_bin))],
-            target: Some(target),
-            extra_args: &format!(
-                "--manifest-path={} --target-dir={} -p pyoxidizer",
-                "qt/bundle/PyOxidizer/Cargo.toml", "$builddir/bundle/rust"
-            ),
-            release_override: Some(BuildProfile::Release),
-        },
-    )?;
-    Ok(())
-}
-
-struct BuildArtifacts {}
-
-impl BuildAction for BuildArtifacts {
-    fn command(&self) -> &str {
-        "$runner build-artifacts $bundle_root $pyoxidizer_bin"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        build.add_inputs("pyoxidizer_bin", inputs![":bundle:pyoxidizer:bin"]);
-        build.add_inputs("", inputs![PRIMARY_VENV.label_as_target("")]);
-        build.add_inputs("", inputs![":bundle:add_wheels:qt6", glob!["qt/bundle/**"]]);
-        build.add_variable("bundle_root", "$builddir/bundle");
-        build.add_outputs_ext(
-            "pyo3_config",
-            vec!["bundle/artifacts/pyo3-build-config-file.txt"],
-            true,
-        );
-    }
-
-    fn check_output_timestamps(&self) -> bool {
-        true
-    }
-}
-
-fn build_artifacts(build: &mut Build) -> Result<()> {
-    build.add_action("bundle:artifacts", BuildArtifacts {})
-}
-
-struct BuildBundle {}
-
-impl BuildAction for BuildBundle {
-    fn command(&self) -> &str {
-        "$runner build-bundle-binary"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        build.add_inputs("", inputs![":bundle:artifacts", glob!["qt/bundle/**"]]);
-        build.add_outputs(
-            "",
-            vec![RustOutput::Binary("anki").path(
-                Utf8Path::new("$builddir/bundle/rust"),
-                Some(
-                    overriden_rust_target_triple()
-                        .unwrap_or_else(|| Platform::current().as_rust_triple()),
-                ),
-                // our pyoxidizer bin uses lto on the release profile
-                BuildProfile::Release,
-            )],
-        );
-    }
-}
-
-fn build_binary(build: &mut Build) -> Result<()> {
-    build.add_action("bundle:binary", BuildBundle {})
-}
-
-struct BuildDistFolder {
-    kind: DistKind,
-    deps: BuildInput,
-}
-
-impl BuildAction for BuildDistFolder {
-    fn command(&self) -> &str {
-        "$runner build-dist-folder $kind $out_folder "
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        build.add_inputs("", &self.deps);
-        build.add_variable("kind", self.kind.name());
-        let folder = match self.kind {
-            DistKind::Standard => "bundle/std",
-        };
-        build.add_outputs("out_folder", vec![folder]);
-        build.add_outputs("stamp", vec![format!("{folder}.stamp")]);
-    }
-
-    fn check_output_timestamps(&self) -> bool {
-        true
-    }
-}
-
-fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> {
-    let deps = inputs![":bundle:deps", ":bundle:binary", glob!["qt/bundle/**"]];
-    let group = match kind {
-        DistKind::Standard => "bundle:folder:std",
-    };
-    build.add_action(group, BuildDistFolder { kind, deps })
-}
-
-fn build_packages(build: &mut Build) -> Result<()> {
-    if cfg!(windows) {
-        build_windows_installers(build)
-    } else if cfg!(target_os = "macos") {
-        build_mac_app(build, DistKind::Standard)?;
-        build_dmgs(build)
-    } else {
-        build_tarball(build, DistKind::Standard)
-    }
-}
-
-struct BuildTarball {
-    kind: DistKind,
-}
-
-impl BuildAction for BuildTarball {
-    fn command(&self) -> &str {
-        "chmod -R a+r $folder && tar -I '$zstd' --transform $transform -cf $tarball -C $folder ."
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        let input_folder_name = self.kind.folder_name();
-        let input_folder_target = format!(":bundle:folder:{input_folder_name}");
-        let input_folder_path = format!("$builddir/bundle/{input_folder_name}");
-
-        let version = anki_version();
-        let qt = match self.kind {
-            DistKind::Standard => "qt6",
-        };
-        let output_folder_base = format!("anki-{version}-linux-{qt}");
-        let output_tarball = format!("bundle/package/{output_folder_base}.tar.zst");
-
-        build.add_inputs("", inputs![input_folder_target]);
-        build.add_variable("zstd", "zstd -c --long -T0 -18");
-        build.add_variable("transform", format!("s%^.%{output_folder_base}%S"));
-        build.add_variable("folder", input_folder_path);
-        build.add_outputs("tarball", vec![output_tarball]);
-    }
-}
-
-fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> {
-    let name = kind.folder_name();
-    build.add_action(format!("bundle:package:{name}"), BuildTarball { kind })
-}
-
-struct BuildWindowsInstallers {}
-
-impl BuildAction for BuildWindowsInstallers {
-    fn command(&self) -> &str {
-        "cargo run -p makeexe --target-dir=out/rust -- $version $src_root $bundle_root $out"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        let version = anki_version();
-        let outputs = ["qt6"].iter().map(|qt| {
-            let output_base = format!("anki-{version}-windows-{qt}");
-            format!("bundle/package/{output_base}.exe")
-        });
-
-        build.add_inputs("", inputs![":bundle:folder:std"]);
-        build.add_variable("version", &version);
-        build.add_variable("bundle_root", "$builddir/bundle");
-        build.add_outputs("out", outputs);
-    }
-}
-
-fn build_windows_installers(build: &mut Build) -> Result<()> {
-    build.add_action("bundle:package", BuildWindowsInstallers {})
-}
-
-struct BuildMacApp {
-    kind: DistKind,
-}
-
-impl BuildAction for BuildMacApp {
-    fn command(&self) -> &str {
-        "cargo run -p makeapp --target-dir=out/rust -- build-app $version $kind $stamp"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        let folder_name = self.kind.folder_name();
-        build.add_inputs("", inputs![format!(":bundle:folder:{folder_name}")]);
-        build.add_variable("version", anki_version());
-        build.add_variable("kind", self.kind.name());
-        build.add_outputs("stamp", vec![format!("bundle/app/{folder_name}.stamp")]);
-    }
-}
-
-fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> {
-    build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind })
-}
-
-struct BuildDmgs {}
-
-impl BuildAction for BuildDmgs {
-    fn command(&self) -> &str {
-        "cargo run -p makeapp --target-dir=out/rust -- build-dmgs $dmgs"
-    }
-
-    fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
-        let version = anki_version();
-        let platform = if targetting_macos_arm() {
-            "apple"
-        } else {
-            "intel"
-        };
-        let qt = &["qt6"][..];
-        let dmgs = qt
-            .iter()
-            .map(|qt| format!("bundle/dmg/anki-{version}-mac-{platform}-{qt}.dmg"));
-
-        build.add_inputs("", inputs![":bundle:app"]);
-        build.add_outputs("dmgs", dmgs);
-    }
-}
-
-fn build_dmgs(build: &mut Build) -> Result<()> {
-    build.add_action("bundle:dmg", BuildDmgs {})
-}
build/configure/src/launcher.rs (new file): 44 lines changed

@@ -0,0 +1,44 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use anyhow::Result;
+use ninja_gen::archives::download_and_extract;
+use ninja_gen::archives::empty_manifest;
+use ninja_gen::archives::OnlineArchive;
+use ninja_gen::command::RunCommand;
+use ninja_gen::hashmap;
+use ninja_gen::inputs;
+use ninja_gen::Build;
+
+pub fn setup_uv_universal(build: &mut Build) -> Result<()> {
+    if !cfg!(target_arch = "aarch64") {
+        return Ok(());
+    }
+
+    build.add_action(
+        "launcher:uv_universal",
+        RunCommand {
+            command: "/usr/bin/lipo",
+            args: "-create -output $out $arm_bin $x86_bin",
+            inputs: hashmap! {
+                "arm_bin" => inputs![":extract:uv:bin"],
+                "x86_bin" => inputs![":extract:uv_mac_x86:bin"],
+            },
+            outputs: hashmap! {
+                "out" => vec!["launcher/uv"],
+            },
+        },
+    )
+}
+
+pub fn build_launcher(build: &mut Build) -> Result<()> {
+    setup_uv_universal(build)?;
+    download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
+
+    Ok(())
+}
+
+const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
+    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
+    sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
+};
@@ -2,7 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 mod aqt;
-mod bundle;
+mod launcher;
 mod platform;
 mod pylib;
 mod python;
@@ -13,13 +13,14 @@ use std::env;

 use anyhow::Result;
 use aqt::build_and_check_aqt;
-use bundle::build_bundle;
+use launcher::build_launcher;
 use ninja_gen::glob;
 use ninja_gen::inputs;
 use ninja_gen::protobuf::check_proto;
 use ninja_gen::protobuf::setup_protoc;
-use ninja_gen::python::setup_python;
+use ninja_gen::python::setup_uv;
 use ninja_gen::Build;
+use platform::overriden_python_venv_platform;
 use pylib::build_pylib;
 use pylib::check_pylib;
 use python::check_python;
@@ -47,7 +48,10 @@ fn main() -> Result<()> {
     check_proto(build, inputs![glob!["proto/**/*.proto"]])?;

     if env::var("OFFLINE_BUILD").is_err() {
-        setup_python(build)?;
+        setup_uv(
+            build,
+            overriden_python_venv_platform().unwrap_or(build.host_platform),
+        )?;
     }
     setup_venv(build)?;

@@ -57,7 +61,7 @@
     build_and_check_aqt(build)?;

     if env::var("OFFLINE_BUILD").is_err() {
-        build_bundle(build)?;
+        build_launcher(build)?;
     }

     setup_sphinx(build)?;
@@ -5,18 +5,30 @@ use std::env;

 use ninja_gen::archives::Platform;

-/// Usually None to use the host architecture; can be overriden by setting
-/// MAC_X86 to build for x86_64 on Apple Silicon
+/// Please see [`overriden_python_target_platform()`] for details.
 pub fn overriden_rust_target_triple() -> Option<&'static str> {
-    overriden_python_target_platform().map(|p| p.as_rust_triple())
+    overriden_python_wheel_platform().map(|p| p.as_rust_triple())
 }

-/// Usually None to use the host architecture; can be overriden by setting
-/// MAC_X86 to build for x86_64 on Apple Silicon
-pub fn overriden_python_target_platform() -> Option<Platform> {
-    if env::var("MAC_X86").is_ok() {
-        Some(Platform::MacX64)
+/// Usually None to use the host architecture, except on Windows which
+/// always uses x86_64, since WebEngine is unavailable for ARM64.
+pub fn overriden_python_venv_platform() -> Option<Platform> {
+    if cfg!(target_os = "windows") {
+        Some(Platform::WindowsX64)
     } else {
         None
     }
 }
+
+/// Like [`overriden_python_venv_platform`], but:
+/// If MAC_X86 is set, an X86 wheel will be built on macOS ARM.
+/// If LIN_ARM64 is set, an ARM64 wheel will be built on Linux AMD64.
+pub fn overriden_python_wheel_platform() -> Option<Platform> {
+    if env::var("MAC_X86").is_ok() {
+        Some(Platform::MacX64)
+    } else if env::var("LIN_ARM64").is_ok() {
+        Some(Platform::LinuxArm)
+    } else {
+        overriden_python_venv_platform()
+    }
+}
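The hunk above layers the overrides: the venv platform is fixed per-OS, the wheel platform adds env-var escapes on top of it, and the Rust target triple now follows the wheel platform. A hedged sketch of how the resolution plays out (illustrative only, not from the repo's tests; assumes a Linux x86_64 host):

// With no overrides set, the wheel platform falls through to the
// venv platform, which is None on Linux (use the host architecture).
assert_eq!(overriden_python_wheel_platform(), None);

// With LIN_ARM64 set, an ARM64 wheel is requested, and
// overriden_rust_target_triple() maps it to the matching Rust triple.
std::env::set_var("LIN_ARM64", "1");
assert_eq!(overriden_python_wheel_platform(), Some(Platform::LinuxArm));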
@@ -14,7 +14,7 @@ use ninja_gen::python::PythonTest;
 use ninja_gen::Build;

 use crate::anki_version;
-use crate::platform::overriden_python_target_platform;
+use crate::platform::overriden_python_wheel_platform;
 use crate::python::BuildWheel;
 use crate::python::GenPythonProto;

|
@ -50,7 +50,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
|
||||||
output: &format!(
|
output: &format!(
|
||||||
"pylib/anki/_rsbridge.{}",
|
"pylib/anki/_rsbridge.{}",
|
||||||
match build.host_platform {
|
match build.host_platform {
|
||||||
Platform::WindowsX64 => "pyd",
|
Platform::WindowsX64 | Platform::WindowsArm => "pyd",
|
||||||
_ => "so",
|
_ => "so",
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
|
@@ -64,13 +64,12 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
         BuildWheel {
             name: "anki",
             version: anki_version(),
-            src_folder: "pylib/anki",
-            gen_folder: "$builddir/pylib/anki",
-            platform: overriden_python_target_platform().or(Some(build.host_platform)),
+            platform: overriden_python_wheel_platform().or(Some(build.host_platform)),
             deps: inputs![
                 ":pylib:anki",
                 glob!("pylib/anki/**"),
-                "python/requirements.anki.in",
+                "pylib/pyproject.toml",
+                "pylib/hatch_build.py"
             ],
         },
     )?;
@@ -7,87 +7,69 @@ use anyhow::Result;
 use ninja_gen::action::BuildAction;
 use ninja_gen::archives::Platform;
 use ninja_gen::build::FilesHandle;
-use ninja_gen::command::RunCommand;
 use ninja_gen::copy::CopyFiles;
 use ninja_gen::glob;
-use ninja_gen::hashmap;
 use ninja_gen::input::BuildInput;
 use ninja_gen::inputs;
 use ninja_gen::python::python_format;
 use ninja_gen::python::PythonEnvironment;
-use ninja_gen::python::PythonLint;
 use ninja_gen::python::PythonTypecheck;
-use ninja_gen::rsync::RsyncFiles;
+use ninja_gen::python::RuffCheck;
 use ninja_gen::Build;

-// When updating Qt, make sure to update the .txt file in bundle.rs as well.
+/// Normalize version string by removing leading zeros from numeric parts
+/// while preserving pre-release markers (b1, rc2, a3, etc.)
+fn normalize_version(version: &str) -> String {
+    version
+        .split('.')
+        .map(|part| {
+            // Check if the part contains only digits
+            if part.chars().all(|c| c.is_ascii_digit()) {
+                // Numeric part: remove leading zeros
+                part.parse::<u32>().unwrap_or(0).to_string()
+            } else {
+                // Mixed part (contains both numbers and pre-release markers)
+                // Split on first non-digit character and normalize the numeric prefix
+                let chars = part.chars();
+                let mut numeric_prefix = String::new();
+                let mut rest = String::new();
+                let mut found_non_digit = false;
+
+                for ch in chars {
+                    if ch.is_ascii_digit() && !found_non_digit {
+                        numeric_prefix.push(ch);
+                    } else {
+                        found_non_digit = true;
+                        rest.push(ch);
+                    }
+                }
+
+                if numeric_prefix.is_empty() {
+                    part.to_string()
+                } else {
+                    let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
+                    format!("{normalized_prefix}{rest}")
+                }
+            }
+        })
+        .collect::<Vec<_>>()
+        .join(".")
+}

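A quick reading of the new normalize_version above: purely numeric dot-separated parts are reparsed as u32, which drops leading zeros, while a mixed part keeps its non-digit suffix and only normalizes the digit prefix. Hedged examples of the expected output (illustrative values, not test cases from this commit):

// "06" is numeric, so it is reparsed and loses its leading zero.
assert_eq!(normalize_version("25.06"), "25.6");
// "66b1" is mixed: the "66" prefix is normalized, "b1" is kept as-is.
assert_eq!(normalize_version("2.1.66b1"), "2.1.66b1");
// Leading zeros are stripped from both plain and mixed parts.
assert_eq!(normalize_version("07.01rc2"), "7.1rc2");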
 pub fn setup_venv(build: &mut Build) -> Result<()> {
-    let platform_deps = if cfg!(windows) {
-        inputs![
-            "python/requirements.qt6_6.txt",
-            "python/requirements.win.txt",
-        ]
-    } else if cfg!(target_os = "macos") {
-        inputs!["python/requirements.qt6_6.txt",]
-    } else if std::env::var("PYTHONPATH").is_ok() {
-        // assume we have a system-provided Qt
-        inputs![]
-    } else if cfg!(target_arch = "aarch64") {
-        inputs!["python/requirements.qt6_8.txt"]
-    } else {
-        inputs!["python/requirements.qt6_6.txt"]
-    };
-    let requirements_txt = inputs!["python/requirements.dev.txt", platform_deps];
+    let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"];
     build.add_action(
         "pyenv",
         PythonEnvironment {
-            folder: "pyenv",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt,
-            extra_binary_exports: &[
-                "pip-compile",
-                "pip-sync",
-                "mypy",
-                "black", // Required for offline build
-                "isort",
-                "pylint",
-                "pytest",
-                "protoc-gen-mypy", // ditto
-            ],
-        },
-    )?;
-
-    // optional venvs for testing other Qt versions
-    let mut venv_reqs = inputs!["python/requirements.bundle.txt"];
-    if cfg!(windows) {
-        venv_reqs = inputs![venv_reqs, "python/requirements.win.txt"];
-    }
-
-    build.add_action(
-        "pyenv-qt6.8",
-        PythonEnvironment {
-            folder: "pyenv-qt6.8",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt6_8.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.15",
-        PythonEnvironment {
-            folder: "pyenv-qt5.15",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt5_15.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.14",
-        PythonEnvironment {
-            folder: "pyenv-qt5.14",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![venv_reqs, "python/requirements.qt5_14.txt"],
-            extra_binary_exports: &[],
+            venv_folder: "pyenv",
+            deps: inputs![
+                "pyproject.toml",
+                "pylib/pyproject.toml",
+                "qt/pyproject.toml",
+                "uv.lock"
+            ],
+            extra_args: "--all-packages --extra qt --extra audio",
+            extra_binary_exports,
         },
     )?;

@@ -133,45 +115,59 @@ impl BuildAction for GenPythonProto {
 pub struct BuildWheel {
     pub name: &'static str,
     pub version: String,
-    pub src_folder: &'static str,
-    pub gen_folder: &'static str,
     pub platform: Option<Platform>,
     pub deps: BuildInput,
 }

 impl BuildAction for BuildWheel {
     fn command(&self) -> &str {
-        "$pyenv_bin $script $src $gen $out"
+        "$uv build --wheel --out-dir=$out_dir --project=$project_dir"
     }

     fn files(&mut self, build: &mut impl FilesHandle) {
-        build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]);
-        build.add_inputs("script", inputs!["python/write_wheel.py"]);
+        build.add_inputs("uv", inputs![":uv_binary"]);
         build.add_inputs("", &self.deps);
-        build.add_variable("src", self.src_folder);
-        build.add_variable("gen", self.gen_folder);

+        // Set the project directory based on which package we're building
+        let project_dir = if self.name == "anki" { "pylib" } else { "qt" };
+        build.add_variable("project_dir", project_dir);
+
+        // Set environment variable for uv to use our pyenv
+        build.add_variable("pyenv_path", "$builddir/pyenv");
+        build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
+
+        // Set output directory
+        build.add_variable("out_dir", "$builddir/wheels/");
+
+        // Calculate the wheel filename that uv will generate
         let tag = if let Some(platform) = self.platform {
-            let platform = match platform {
-                Platform::LinuxX64 => "manylinux_2_35_x86_64",
-                Platform::LinuxArm => "manylinux_2_35_aarch64",
+            let platform_tag = match platform {
+                Platform::LinuxX64 => "manylinux_2_36_x86_64",
+                Platform::LinuxArm => "manylinux_2_36_aarch64",
                 Platform::MacX64 => "macosx_12_0_x86_64",
                 Platform::MacArm => "macosx_12_0_arm64",
                 Platform::WindowsX64 => "win_amd64",
+                Platform::WindowsArm => "win_arm64",
             };
-            format!("cp39-abi3-{platform}")
+            format!("cp39-abi3-{platform_tag}")
         } else {
            "py3-none-any".into()
         };
+
+        // Set environment variable for hatch_build.py to use the correct platform tag
+        build.add_variable("wheel_tag", &tag);
+        build.add_env_var("ANKI_WHEEL_TAG", "$wheel_tag");
+
         let name = self.name;
-        let version = &self.version;
-        let wheel_path = format!("wheels/{name}-{version}-{tag}.whl");
+        let normalized_version = normalize_version(&self.version);
+
+        let wheel_path = format!("wheels/{name}-{normalized_version}-{tag}.whl");
         build.add_outputs("out", vec![wheel_path]);
     }
 }

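Putting the pieces of the new `BuildWheel` together: after ninja substitutes the variables, the action runs a `uv build` command and must predict the exact wheel filename uv emits. A standalone sketch with illustrative values; the uv path and the version string here are assumptions, not pinned by this commit:

```rust
// Illustrative only: how the command template and expected output path above
// compose for the pylib wheel.
fn main() {
    let uv = "out/extracted/uv/uv"; // assumed extraction path for ":uv_binary"
    let out_dir = "out/wheels/";
    let project_dir = "pylib"; // chosen because name == "anki"
    let cmd = format!("{uv} build --wheel --out-dir={out_dir} --project={project_dir}");
    assert!(cmd.ends_with("--project=pylib"));

    // The build graph must predict the exact filename uv will write:
    let (name, version, tag) = ("anki", "25.6b1", "cp39-abi3-manylinux_2_36_x86_64");
    let wheel_path = format!("wheels/{name}-{version}-{tag}.whl");
    assert_eq!(
        wheel_path,
        "wheels/anki-25.6b1-cp39-abi3-manylinux_2_36_x86_64.whl"
    );
}
```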
 pub fn check_python(build: &mut Build) -> Result<()> {
-    python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?;
     python_format(build, "tools", inputs![glob!("tools/**/*.py")])?;

     build.add_action(
@@ -183,7 +179,6 @@ pub fn check_python(build: &mut Build) -> Result<()> {
             "qt/tools",
             "out/pylib/anki",
             "out/qt/_aqt",
-            "ftl",
             "python",
             "tools",
         ],
@@ -195,60 +190,26 @@ pub fn check_python(build: &mut Build) -> Result<()> {
         },
     )?;

-    add_pylint(build)?;
-
-    Ok(())
-}
-
-fn add_pylint(build: &mut Build) -> Result<()> {
-    // pylint does not support PEP420 implicit namespaces split across import paths,
-    // so we need to merge our pylib sources and generated files before invoking it,
-    // and add a top-level __init__.py
+    let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"];
+    let ruff_deps = inputs![
+        glob!["{pylib,ftl,qt,python,tools}/**/*.py"],
+        ":pylib:anki",
+        ":qt:aqt"
+    ];
     build.add_action(
-        "check:pylint:copy_pylib",
-        RsyncFiles {
-            inputs: inputs![":pylib:anki"],
-            target_folder: "pylint/anki",
-            strip_prefix: "$builddir/pylib/anki",
-            // avoid copying our large rsbridge binary
-            extra_args: "--links",
+        "check:ruff",
+        RuffCheck {
+            folders: ruff_folders,
+            deps: ruff_deps.clone(),
+            check_only: true,
         },
     )?;
     build.add_action(
-        "check:pylint:copy_pylib",
-        RsyncFiles {
-            inputs: inputs![glob!["pylib/anki/**"]],
-            target_folder: "pylint/anki",
-            strip_prefix: "pylib/anki",
-            extra_args: "",
-        },
-    )?;
-    build.add_action(
-        "check:pylint:copy_pylib",
-        RunCommand {
-            command: ":pyenv:bin",
-            args: "$script $out",
-            inputs: hashmap! { "script" => inputs!["python/mkempty.py"] },
-            outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] },
-        },
-    )?;
-    build.add_action(
-        "check:pylint",
-        PythonLint {
-            folders: &[
-                "$builddir/pylint/anki",
-                "qt/aqt",
-                "ftl",
-                "pylib/tools",
-                "tools",
-                "python",
-            ],
-            pylint_ini: inputs![".pylintrc"],
-            deps: inputs![
-                ":check:pylint:copy_pylib",
-                ":qt:aqt",
-                glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py")
-            ],
+        "fix:ruff",
+        RuffCheck {
+            folders: ruff_folders,
+            deps: ruff_deps,
+            check_only: false,
         },
     )?;

@@ -262,8 +223,7 @@ struct Sphinx {
 impl BuildAction for Sphinx {
     fn command(&self) -> &str {
         if env::var("OFFLINE_BUILD").is_err() {
-            "$pip install sphinx sphinx_rtd_theme sphinx-autoapi \
-            && $python python/sphinx/build.py"
+            "$uv sync --extra sphinx && $python python/sphinx/build.py"
         } else {
             "$python python/sphinx/build.py"
         }
@@ -271,7 +231,10 @@ impl BuildAction for Sphinx {

     fn files(&mut self, build: &mut impl FilesHandle) {
         if env::var("OFFLINE_BUILD").is_err() {
-            build.add_inputs("pip", inputs![":pyenv:pip"]);
+            build.add_inputs("uv", inputs![":uv_binary"]);
+            // Set environment variable to use the existing pyenv
+            build.add_variable("pyenv_path", "$builddir/pyenv");
+            build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
         }
         build.add_inputs("python", inputs![":pyenv:bin"]);
         build.add_inputs("", &self.deps);
@@ -294,8 +257,35 @@ pub(crate) fn setup_sphinx(build: &mut Build) -> Result<()> {
     build.add_action(
         "python:sphinx",
         Sphinx {
-            deps: inputs![":pylib", ":qt", ":python:sphinx:copy_conf"],
+            deps: inputs![
+                ":pylib",
+                ":qt",
+                ":python:sphinx:copy_conf",
+                "pyproject.toml"
+            ],
         },
     )?;
     Ok(())
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_normalize_version_basic() {
+        assert_eq!(normalize_version("1.2.3"), "1.2.3");
+        assert_eq!(normalize_version("01.02.03"), "1.2.3");
+        assert_eq!(normalize_version("1.0.0"), "1.0.0");
+    }
+
+    #[test]
+    fn test_normalize_version_with_prerelease() {
+        assert_eq!(normalize_version("1.2.3b1"), "1.2.3b1");
+        assert_eq!(normalize_version("01.02.03b1"), "1.2.3b1");
+        assert_eq!(normalize_version("1.0.0rc2"), "1.0.0rc2");
+        assert_eq!(normalize_version("2.1.0a3"), "2.1.0a3");
+        assert_eq!(normalize_version("1.2.3beta1"), "1.2.3beta1");
+        assert_eq!(normalize_version("1.2.3alpha1"), "1.2.3alpha1");
+    }
+}

@@ -154,7 +154,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
             "$builddir/buildhash",
             // building on Windows requires python3.lib
             if cfg!(windows) {
-                inputs![":extract:python"]
+                inputs![":pyenv:bin"]
             } else {
                 inputs![]
             }
@@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {

 pub fn check_rust(build: &mut Build) -> Result<()> {
     let inputs = inputs![
-        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"),
+        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
         "Cargo.lock",
         "Cargo.toml",
         "rust-toolchain.toml",
@@ -247,7 +247,7 @@ pub fn check_minilints(build: &mut Build) -> Result<()> {
     let files = inputs![
         glob![
             "**/*.{py,rs,ts,svelte,mjs,md}",
-            "{node_modules,qt/bundle/PyOxidizer,ts/.svelte-kit}/**"
+            "{node_modules,ts/.svelte-kit}/**"
         ],
         "Cargo.lock"
     ];

@@ -16,5 +16,26 @@ globset.workspace = true
 itertools.workspace = true
 maplit.workspace = true
 num_cpus.workspace = true
+regex.workspace = true
+serde_json.workspace = true
+sha2.workspace = true
 walkdir.workspace = true
 which.workspace = true
+
+[target.'cfg(windows)'.dependencies]
+reqwest = { workspace = true, features = ["blocking", "json", "native-tls"] }
+
+[target.'cfg(not(windows))'.dependencies]
+reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
+
+[[bin]]
+name = "update_uv"
+path = "src/bin/update_uv.rs"
+
+[[bin]]
+name = "update_protoc"
+path = "src/bin/update_protoc.rs"
+
+[[bin]]
+name = "update_node"
+path = "src/bin/update_node.rs"

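The new `[[bin]]` targets above are small network tools, which is why the manifest splits reqwest's TLS backend by target (native-tls on Windows, rustls elsewhere) and enables only the blocking and JSON features. A minimal sketch of the fetch pattern the three bins share, using the uv release endpoint that appears in `update_uv.rs` below:

```rust
// Minimal sketch of the blocking fetch pattern shared by the update_* bins.
// Requires reqwest's "blocking" feature, as declared in the manifest above.
use reqwest::blocking::Client;

fn main() -> Result<(), reqwest::Error> {
    let client = Client::new();
    let response = client
        .get("https://api.github.com/repos/astral-sh/uv/releases/latest")
        // GitHub's API rejects requests without a User-Agent.
        .header("User-Agent", "Anki-Build-Script")
        .send()?;
    println!("HTTP status: {}", response.status());
    Ok(())
}
```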
@@ -26,22 +26,21 @@ pub enum Platform {
     MacX64,
     MacArm,
     WindowsX64,
+    WindowsArm,
 }

 impl Platform {
     pub fn current() -> Self {
-        if cfg!(windows) {
-            Self::WindowsX64
-        } else {
-            let os = std::env::consts::OS;
-            let arch = std::env::consts::ARCH;
-            match (os, arch) {
-                ("linux", "x86_64") => Self::LinuxX64,
-                ("linux", "aarch64") => Self::LinuxArm,
-                ("macos", "x86_64") => Self::MacX64,
-                ("macos", "aarch64") => Self::MacArm,
-                _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
-            }
+        let os = std::env::consts::OS;
+        let arch = std::env::consts::ARCH;
+        match (os, arch) {
+            ("linux", "x86_64") => Self::LinuxX64,
+            ("linux", "aarch64") => Self::LinuxArm,
+            ("macos", "x86_64") => Self::MacX64,
+            ("macos", "aarch64") => Self::MacArm,
+            ("windows", "x86_64") => Self::WindowsX64,
+            ("windows", "aarch64") => Self::WindowsArm,
+            _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
         }
     }
 }
@@ -62,6 +61,7 @@ impl Platform {
             Platform::MacX64 => "x86_64-apple-darwin",
             Platform::MacArm => "aarch64-apple-darwin",
             Platform::WindowsX64 => "x86_64-pc-windows-msvc",
+            Platform::WindowsArm => "aarch64-pc-windows-msvc",
         }
     }
 }

build/ninja_gen/src/bin/update_node.rs (new file, 268 lines)
@@ -0,0 +1,268 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+
+#[derive(Debug)]
+struct NodeRelease {
+    version: String,
+    files: Vec<NodeFile>,
+}
+
+#[derive(Debug)]
+struct NodeFile {
+    filename: String,
+    url: String,
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let release_info = fetch_node_release_info()?;
+    let new_text = generate_node_archive_function(&release_info)?;
+    update_node_text(&new_text)?;
+    println!("Node.js archive function updated successfully!");
+    Ok(())
+}
+
+fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
+    let client = Client::new();
+
+    // Get the Node.js release info
+    let response = client
+        .get("https://nodejs.org/dist/index.json")
+        .header("User-Agent", "anki-build-updater")
+        .send()?;
+
+    let releases: Vec<Value> = response.json()?;
+
+    // Find the latest LTS release
+    let latest = releases
+        .iter()
+        .find(|release| {
+            // LTS releases have a non-false "lts" field
+            release["lts"].as_str().is_some() && release["lts"] != false
+        })
+        .ok_or("No LTS releases found")?;
+
+    let version = latest["version"]
+        .as_str()
+        .ok_or("Version not found")?
+        .to_string();
+
+    let files = latest["files"]
+        .as_array()
+        .ok_or("Files array not found")?
+        .iter()
+        .map(|f| f.as_str().unwrap_or(""))
+        .collect::<Vec<_>>();
+
+    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
+    println!("Found Node.js LTS version: {version} ({lts_name})");
+
+    // Map platforms to their expected file keys and full filenames
+    let platform_mapping = vec![
+        (
+            "linux-x64",
+            "linux-x64",
+            format!("node-{version}-linux-x64.tar.xz"),
+        ),
+        (
+            "linux-arm64",
+            "linux-arm64",
+            format!("node-{version}-linux-arm64.tar.xz"),
+        ),
+        (
+            "darwin-x64",
+            "osx-x64-tar",
+            format!("node-{version}-darwin-x64.tar.xz"),
+        ),
+        (
+            "darwin-arm64",
+            "osx-arm64-tar",
+            format!("node-{version}-darwin-arm64.tar.xz"),
+        ),
+        (
+            "win-x64",
+            "win-x64-zip",
+            format!("node-{version}-win-x64.zip"),
+        ),
+        (
+            "win-arm64",
+            "win-arm64-zip",
+            format!("node-{version}-win-arm64.zip"),
+        ),
+    ];
+
+    let mut node_files = Vec::new();
+
+    for (platform, file_key, filename) in platform_mapping {
+        // Check if this file exists in the release
+        if files.contains(&file_key) {
+            let url = format!("https://nodejs.org/dist/{version}/{filename}");
+            node_files.push(NodeFile {
+                filename: filename.clone(),
+                url,
+            });
+            println!("Found file for {platform}: {filename} (key: {file_key})");
+        } else {
+            return Err(
+                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
+            );
+        }
+    }
+
+    Ok(NodeRelease {
+        version,
+        files: node_files,
+    })
+}
+
+fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    // Fetch the SHASUMS256.txt file once
+    println!("Fetching SHA256 checksums...");
+    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
+    let shasums_response = client
+        .get(&shasums_url)
+        .header("User-Agent", "anki-build-updater")
+        .send()?;
+    let shasums_text = shasums_response.text()?;
+
+    // Create a mapping from filename patterns to platform names - using the exact
+    // patterns we stored in files
+    let platform_mapping = vec![
+        ("linux-x64.tar.xz", "LinuxX64"),
+        ("linux-arm64.tar.xz", "LinuxArm"),
+        ("darwin-x64.tar.xz", "MacX64"),
+        ("darwin-arm64.tar.xz", "MacArm"),
+        ("win-x64.zip", "WindowsX64"),
+        ("win-arm64.zip", "WindowsArm"),
+    ];
+
+    let mut platform_blocks = Vec::new();
+
+    for (file_pattern, platform_name) in platform_mapping {
+        // Find the file that ends with this pattern
+        if let Some(file) = release
+            .files
+            .iter()
+            .find(|f| f.filename.ends_with(file_pattern))
+        {
+            // Find the SHA256 for this file
+            let sha256 = shasums_text
+                .lines()
+                .find(|line| line.contains(&file.filename))
+                .and_then(|line| line.split_whitespace().next())
+                .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;
+
+            println!(
+                "Found SHA256 for {}: {} => {}",
+                platform_name, file.filename, sha256
+            );
+
+            let block = format!(
+                "        Platform::{} => OnlineArchive {{\n            url: \"{}\",\n            sha256: \"{}\",\n        }},",
+                platform_name, file.url, sha256
+            );
+            platform_blocks.push(block);
+        } else {
+            return Err(format!(
+                "File not found for platform {platform_name}: no file ending with {file_pattern}"
+            )
+            .into());
+        }
+    }
+
+    let function = format!(
+        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
+        platform_blocks.join("\n")
+    );
+
+    Ok(function)
+}
+
+fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
+    let node_rs_content = read_node_rs()?;
+
+    // Regex to match the entire node_archive function with proper multiline
+    // matching
+    let re = Regex::new(
+        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
+    )?;
+
+    let updated_content = re.replace(&node_rs_content, new_function);
+
+    write_node_rs(&updated_content)?;
+    Ok(())
+}
+
+fn read_node_rs() -> Result<String, Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    Ok(fs::read_to_string(path)?)
+}
+
+fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
+    let manifest_dir =
+        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
+    let path = Path::new(&manifest_dir).join("src").join("node.rs");
+    fs::write(path, content)?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_regex_replacement() {
+        let sample_content = r#"Some other code
+pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
+            sha256: "old_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
+            sha256: "old_hash",
+        },
+    }
+}
+
+More code here"#;
+
+        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
+    match platform {
+        Platform::LinuxX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
+            sha256: "new_hash",
+        },
+        Platform::MacX64 => OnlineArchive {
+            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
+            sha256: "new_hash",
+        },
+    }
+}"#;
+
+        let re = Regex::new(
+            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
+        ).unwrap();
+
+        let result = re.replace(sample_content, new_function);
+        assert!(result.contains("v21.0.0"));
+        assert!(result.contains("new_hash"));
+        assert!(!result.contains("old_hash"));
+        assert!(result.contains("Some other code"));
+        assert!(result.contains("More code here"));
+    }
+}

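The splice step above hinges on one regex: `(?s)` lets `.` cross newlines, and the lazy `.*?` plus the two trailing brace patterns bound the match to a single function body. The file's own test exercises it against realistic content; a minimal standalone sketch of the same mechanism:

```rust
// Standalone sketch of the splice used by update_node.rs: replace exactly
// one multi-line function body, leaving surrounding text untouched.
use regex::Regex;

fn main() {
    let old = "before\npub fn node_archive(platform: Platform) -> OnlineArchive {\n    match platform {\n    }\n}\nafter";
    let re = Regex::new(
        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
    )
    .unwrap();
    let new = re.replace(old, "REPLACED");
    assert_eq!(new, "before\nREPLACED\nafter");
}
```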
build/ninja_gen/src/bin/update_protoc.rs (new file, 125 lines)
@@ -0,0 +1,125 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+use sha2::Digest;
+use sha2::Sha256;
+
+fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    println!("Fetching latest protoc release info from GitHub...");
+    // Fetch latest release info
+    let response = client
+        .get("https://api.github.com/repos/protocolbuffers/protobuf/releases/latest")
+        .header("User-Agent", "Anki-Build-Script")
+        .send()?;
+
+    let release_info: Value = response.json()?;
+    let assets = release_info["assets"]
+        .as_array()
+        .expect("assets should be an array");
+
+    // Map platform names to their corresponding asset patterns
+    let platform_patterns = [
+        ("LinuxX64", "linux-x86_64"),
+        ("LinuxArm", "linux-aarch_64"),
+        ("MacX64", "osx-universal_binary"), // Mac uses universal binary for both
+        ("MacArm", "osx-universal_binary"),
+        ("WindowsX64", "win64"), // Windows uses x86 binary for both archs
+        ("WindowsArm", "win64"),
+    ];
+
+    let mut match_blocks = Vec::new();
+
+    for (platform, pattern) in platform_patterns {
+        // Find the asset matching the platform pattern
+        let asset = assets.iter().find(|asset| {
+            let name = asset["name"].as_str().unwrap_or("");
+            name.starts_with("protoc-") && name.contains(pattern) && name.ends_with(".zip")
+        });
+
+        if asset.is_none() {
+            eprintln!("No asset found for platform {platform} pattern {pattern}");
+            continue;
+        }
+
+        let asset = asset.unwrap();
+        let download_url = asset["browser_download_url"].as_str().unwrap();
+        let asset_name = asset["name"].as_str().unwrap();
+
+        // Download the file and calculate SHA256 locally
+        println!("Downloading and checksumming {asset_name} for {platform}...");
+        let response = client
+            .get(download_url)
+            .header("User-Agent", "Anki-Build-Script")
+            .send()?;
+
+        let bytes = response.bytes()?;
+        let mut hasher = Sha256::new();
+        hasher.update(&bytes);
+        let sha256 = format!("{:x}", hasher.finalize());
+
+        // Handle platform-specific match patterns
+        let match_pattern = match platform {
+            "MacX64" => "Platform::MacX64 | Platform::MacArm",
+            "MacArm" => continue, // Skip MacArm since it's handled with MacX64
+            "WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
+            "WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
+            _ => &format!("Platform::{platform}"),
+        };
+
+        match_blocks.push(format!(
+            "        {match_pattern} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
+        ));
+    }
+
+    Ok(format!(
+        "pub fn protoc_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
+        match_blocks.join(",\n")
+    ))
+}
+
+fn read_protobuf_rs() -> Result<String, Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
+    println!("Reading {}", path.display());
+    let content = fs::read_to_string(path)?;
+    Ok(content)
+}
+
+fn update_protoc_text(old_text: &str, new_protoc_text: &str) -> Result<String, Box<dyn Error>> {
+    let re =
+        Regex::new(r"(?ms)^pub fn protoc_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\}")
+            .unwrap();
+    if !re.is_match(old_text) {
+        return Err("Could not find protoc_archive function block to replace".into());
+    }
+    let new_content = re.replace(old_text, new_protoc_text).to_string();
+    println!("Original lines: {}", old_text.lines().count());
+    println!("Updated lines: {}", new_content.lines().count());
+    Ok(new_content)
+}
+
+fn write_protobuf_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
+    println!("Writing to {}", path.display());
+    fs::write(path, content)?;
+    Ok(())
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let new_protoc_archive = fetch_protoc_release_info()?;
+    let content = read_protobuf_rs()?;
+    let updated_content = update_protoc_text(&content, &new_protoc_archive)?;
+    write_protobuf_rs(&updated_content)?;
+    println!("Successfully updated protoc_archive function in protobuf.rs");
+    Ok(())
+}

build/ninja_gen/src/bin/update_uv.rs (new file, 140 lines)
@@ -0,0 +1,140 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::error::Error;
+use std::fs;
+use std::path::Path;
+
+use regex::Regex;
+use reqwest::blocking::Client;
+use serde_json::Value;
+
+fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+
+    println!("Fetching latest uv release info from GitHub...");
+    // Fetch latest release info
+    let response = client
+        .get("https://api.github.com/repos/astral-sh/uv/releases/latest")
+        .header("User-Agent", "Anki-Build-Script")
+        .send()?;
+
+    let release_info: Value = response.json()?;
+    let assets = release_info["assets"]
+        .as_array()
+        .expect("assets should be an array");
+
+    // Map platform names to their corresponding asset patterns
+    let platform_patterns = [
+        ("LinuxX64", "x86_64-unknown-linux-gnu"),
+        ("LinuxArm", "aarch64-unknown-linux-gnu"),
+        ("MacX64", "x86_64-apple-darwin"),
+        ("MacArm", "aarch64-apple-darwin"),
+        ("WindowsX64", "x86_64-pc-windows-msvc"),
+        ("WindowsArm", "aarch64-pc-windows-msvc"),
+    ];
+
+    let mut match_blocks = Vec::new();
+
+    for (platform, pattern) in platform_patterns {
+        // Find the asset matching the platform pattern (the binary)
+        let asset = assets.iter().find(|asset| {
+            let name = asset["name"].as_str().unwrap_or("");
+            name.contains(pattern) && (name.ends_with(".tar.gz") || name.ends_with(".zip"))
+        });
+        if asset.is_none() {
+            eprintln!("No asset found for platform {platform} pattern {pattern}");
+            continue;
+        }
+        let asset = asset.unwrap();
+        let download_url = asset["browser_download_url"].as_str().unwrap();
+        let asset_name = asset["name"].as_str().unwrap();
+
+        // Find the corresponding .sha256 or .sha256sum asset
+        let sha_asset = assets.iter().find(|a| {
+            let name = a["name"].as_str().unwrap_or("");
+            name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
+        });
+        if sha_asset.is_none() {
+            eprintln!("No sha256 asset found for {asset_name}");
+            continue;
+        }
+        let sha_asset = sha_asset.unwrap();
+        let sha_url = sha_asset["browser_download_url"].as_str().unwrap();
+        println!("Fetching SHA256 for {platform}...");
+        let sha_text = client
+            .get(sha_url)
+            .header("User-Agent", "Anki-Build-Script")
+            .send()?
+            .text()?;
+        // The sha file is usually of the form: "<sha256>  <filename>"
+        let sha256 = sha_text.split_whitespace().next().unwrap_or("");
+
+        match_blocks.push(format!(
+            "        Platform::{platform} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
+        ));
+    }
+
+    Ok(format!(
+        "pub fn uv_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}",
+        match_blocks.join(",\n")
+    ))
+}
+
+fn read_python_rs() -> Result<String, Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/python.rs");
+    println!("Reading {}", path.display());
+    let content = fs::read_to_string(path)?;
+    Ok(content)
+}
+
+fn update_uv_text(old_text: &str, new_uv_text: &str) -> Result<String, Box<dyn Error>> {
+    let re = Regex::new(r"(?ms)^pub fn uv_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}\s*\n\s*\}").unwrap();
+    if !re.is_match(old_text) {
+        return Err("Could not find uv_archive function block to replace".into());
+    }
+    let new_content = re.replace(old_text, new_uv_text).to_string();
+    println!("Original lines: {}", old_text.lines().count());
+    println!("Updated lines: {}", new_content.lines().count());
+    Ok(new_content)
+}
+
+fn write_python_rs(content: &str) -> Result<(), Box<dyn Error>> {
+    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
+    let path = Path::new(&manifest_dir).join("src/python.rs");
+    println!("Writing to {}", path.display());
+    fs::write(path, content)?;
+    Ok(())
+}
+
+fn main() -> Result<(), Box<dyn Error>> {
+    let new_uv_archive = fetch_uv_release_info()?;
+    let content = read_python_rs()?;
+    let updated_content = update_uv_text(&content, &new_uv_archive)?;
+    write_python_rs(&updated_content)?;
+    println!("Successfully updated uv_archive function in python.rs");
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_update_uv_text_with_actual_file() {
+        let content = fs::read_to_string("src/python.rs").unwrap();
+        let original_lines = content.lines().count();
+
+        const EXPECTED_LINES_REMOVED: usize = 38;
+
+        let updated = update_uv_text(&content, "").unwrap();
+        let updated_lines = updated.lines().count();
+
+        assert_eq!(
+            updated_lines,
+            original_lines - EXPECTED_LINES_REMOVED,
+            "Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
+        );
+    }
+}

@@ -300,7 +300,7 @@ impl BuildStatement<'_> {

         writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
         for (key, value) in self.variables.iter().sorted() {
-            writeln!(buf, "  {key} = {}", value).unwrap();
+            writeln!(buf, "  {key} = {value}").unwrap();
         }
         writeln!(buf).unwrap();

@@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> {
         let outputs = outputs.into_iter().map(|v| {
             let v = v.as_ref();
             let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
-                format!("$builddir/{}", v)
+                format!("$builddir/{v}")
             } else {
                 v.to_owned()
             };

@@ -162,7 +162,7 @@ impl BuildAction for CargoTest {
         "cargo-nextest",
         CargoInstall {
             binary_name: "cargo-nextest",
-            args: "cargo-nextest --version 0.9.57 --locked",
+            args: "cargo-nextest --version 0.9.99 --locked --no-default-features --features default-no-update",
         },
     )?;
     setup_flags(build)

@@ -19,24 +19,28 @@ use crate::input::BuildInput;
 pub fn node_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
-            sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
+            sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
         },
         Platform::LinuxArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
-            sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
+            sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
         },
         Platform::MacX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
-            sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
+            sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
         },
         Platform::MacArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
-            sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
+            sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
         },
         Platform::WindowsX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
-            sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
+            sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
+        },
+        Platform::WindowsArm => OnlineArchive {
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
+            sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
         },
     }
 }

@@ -21,26 +21,26 @@ pub fn protoc_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip",
-                sha256: "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-x86_64.zip",
+                sha256: "96553041f1a91ea0efee963cb16f462f5985b4d65365f3907414c360044d8065",
             }
-        }
+        },
         Platform::LinuxArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip",
-                sha256: "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-aarch_64.zip",
+                sha256: "6c554de11cea04c56ebf8e45b54434019b1cd85223d4bbd25c282425e306ecc2",
             }
-        }
+        },
         Platform::MacX64 | Platform::MacArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip",
-                sha256: "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-osx-universal_binary.zip",
+                sha256: "99ea004549c139f46da5638187a85bbe422d78939be0fa01af1aa8ab672e395f",
             }
-        }
-        Platform::WindowsX64 => {
+        },
+        Platform::WindowsX64 | Platform::WindowsArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip",
-                sha256: "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-win64.zip",
+                sha256: "70381b116ab0d71cb6a5177d9b17c7c13415866603a0fd40d513dafe32d56c35",
             }
         }
     }
 }
@@ -67,7 +67,7 @@ fn clang_format_archive(platform: Platform) -> OnlineArchive {
                 sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e",
             }
         }
-        Platform::WindowsX64 => {
+        Platform::WindowsX64 | Platform::WindowsArm=> {
             OnlineArchive {
                 url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip",
                 sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc",

@@ -9,6 +9,7 @@ use maplit::hashmap;

 use crate::action::BuildAction;
 use crate::archives::download_and_extract;
+use crate::archives::with_exe;
 use crate::archives::OnlineArchive;
 use crate::archives::Platform;
 use crate::hash::simple_hash;
@@ -16,82 +17,113 @@ use crate::input::BuildInput;
 use crate::inputs;
 use crate::Build;

-/// When updating this, pyoxidizer.bzl needs updating too, but it uses different
-/// files.
-pub fn python_archive(platform: Platform) -> OnlineArchive {
+// To update, run 'cargo run --bin update_uv'.
+// You'll need to do this when bumping Python versions, as uv bakes in
+// the latest known version.
+// When updating Python version, make sure to update version tag in BuildWheel
+// too.
+pub fn uv_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-install_only.tar.gz",
-                sha256: "9426bca501ae0a257392b10719e2e20ff5fa5e22a3ce4599d6ad0b3139f86417",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz",
+                sha256: "909278eb197c5ed0e9b5f16317d1255270d1f9ea4196e7179ce934d48c4c2545",
             }
-        }
+        },
         Platform::LinuxArm => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-unknown-linux-gnu-install_only.tar.gz",
-                sha256: "7d19e1ecd6e582423f7c74a0c67491eaa982ce9d5c5f35f0e4289f83127abcb8",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-unknown-linux-gnu.tar.gz",
+                sha256: "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f",
             }
-        }
+        },
         Platform::MacX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-install_only.tar.gz",
-                sha256: "5a0bf895a5cb08d6d008140abb41bb2c8cd638a665273f7d8eb258bc89de439b",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-apple-darwin.tar.gz",
+                sha256: "d785753ac092e25316180626aa691c5dfe1fb075290457ba4fdb72c7c5661321",
             }
-        }
+        },
         Platform::MacArm => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-install_only.tar.gz",
-                sha256: "bf0cd90204a2cc6da48cae1e4b32f48c9f7031fbe1238c5972104ccb0155d368",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz",
+                sha256: "721f532b73171586574298d4311a91d5ea2c802ef4db3ebafc434239330090c6",
             }
-        }
+        },
         Platform::WindowsX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
-                sha256: "8f0544cd593984f7ecb90c685931249c579302124b9821064873f3a14ed07005",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-pc-windows-msvc.zip",
+                sha256: "e199b10bef1a7cc540014483e7f60f825a174988f41020e9d2a6b01bd60f0669",
+            }
+        },
+        Platform::WindowsArm => {
+            OnlineArchive {
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-pc-windows-msvc.zip",
+                sha256: "bb40708ad549ad6a12209cb139dd751bf0ede41deb679ce7513ce197bd9ef234",
             }
         }
     }
 }

-/// Returns the Python binary, which can be used to create venvs.
-/// Downloads if missing.
-pub fn setup_python(build: &mut Build) -> Result<()> {
-    // if changing this, make sure you remove out/pyenv
-    let python_binary = match env::var("PYTHON_BINARY") {
+pub fn setup_uv(build: &mut Build, platform: Platform) -> Result<()> {
+    let uv_binary = match env::var("UV_BINARY") {
         Ok(path) => {
             assert!(
                 Utf8Path::new(&path).is_absolute(),
-                "PYTHON_BINARY must be absolute"
+                "UV_BINARY must be absolute"
             );
             path.into()
         }
         Err(_) => {
             download_and_extract(
                 build,
-                "python",
-                python_archive(build.host_platform),
+                "uv",
+                uv_archive(platform),
                 hashmap! { "bin" => [
-                    if cfg!(windows) { "python.exe" } else { "bin/python3"}
+                    with_exe("uv")
                 ] },
             )?;
-            inputs![":extract:python:bin"]
+            inputs![":extract:uv:bin"]
         }
     };
-    build.add_dependency("python_binary", python_binary);
+    build.add_dependency("uv_binary", uv_binary);
+
+    // Our macOS packaging needs access to the x86 binary on ARM.
+    if cfg!(target_arch = "aarch64") {
+        download_and_extract(
+            build,
+            "uv_mac_x86",
+            uv_archive(Platform::MacX64),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }
+    // Our Linux packaging needs access to the ARM binary on x86
+    if cfg!(target_arch = "x86_64") {
+        download_and_extract(
+            build,
+            "uv_lin_arm",
+            uv_archive(Platform::LinuxArm),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }
+
     Ok(())
 }

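The `with_exe("uv")` calls above pick the right binary name per platform when registering extracted files. As a rough standalone illustration: the real helper lives in `archives.rs` and its exact signature may differ from this simplified stand-in, so treat it as a sketch of the idea rather than the crate's API:

```rust
// Sketch of the idea behind with_exe(): Windows archives ship "uv.exe",
// everything else ships "uv". Simplified stand-in, not the crate's helper.
fn with_exe(name: &str) -> String {
    if cfg!(windows) {
        format!("{name}.exe")
    } else {
        name.to_string()
    }
}

fn main() {
    // Prints "uv" on Unix hosts and "uv.exe" on Windows.
    println!("{}", with_exe("uv"));
}
```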
pub struct PythonEnvironment {
|
pub struct PythonEnvironment {
|
||||||
pub folder: &'static str,
|
pub deps: BuildInput,
|
||||||
pub base_requirements_txt: BuildInput,
|
// todo: rename
|
||||||
pub requirements_txt: BuildInput,
|
pub venv_folder: &'static str,
|
||||||
|
pub extra_args: &'static str,
|
||||||
pub extra_binary_exports: &'static [&'static str],
|
pub extra_binary_exports: &'static [&'static str],
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BuildAction for PythonEnvironment {
|
impl BuildAction for PythonEnvironment {
|
||||||
fn command(&self) -> &str {
|
fn command(&self) -> &str {
|
||||||
if env::var("OFFLINE_BUILD").is_err() {
|
if env::var("OFFLINE_BUILD").is_err() {
|
||||||
"$runner pyenv $python_binary $builddir/$pyenv_folder $system_pkgs $base_requirements $requirements"
|
"$runner pyenv $uv_binary $builddir/$pyenv_folder -- $extra_args"
|
||||||
} else {
|
} else {
|
||||||
"echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
|
"echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
|
||||||
}
|
}
|
||||||
|
@ -99,7 +131,7 @@ impl BuildAction for PythonEnvironment {
|
||||||
|
|
||||||
fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
|
fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
|
||||||
let bin_path = |binary: &str| -> Vec<String> {
|
let bin_path = |binary: &str| -> Vec<String> {
|
||||||
let folder = self.folder;
|
let folder = self.venv_folder;
|
||||||
let path = if cfg!(windows) {
|
let path = if cfg!(windows) {
|
||||||
format!("{folder}/scripts/{binary}.exe")
|
format!("{folder}/scripts/{binary}.exe")
|
||||||
} else {
|
} else {
|
||||||
|
@ -108,17 +140,24 @@ impl BuildAction for PythonEnvironment {
|
||||||
vec![path]
|
vec![path]
|
||||||
};
|
};
|
||||||
|
|
||||||
|
build.add_inputs("", &self.deps);
|
||||||
|
build.add_variable("pyenv_folder", self.venv_folder);
|
||||||
if env::var("OFFLINE_BUILD").is_err() {
|
if env::var("OFFLINE_BUILD").is_err() {
|
||||||
build.add_inputs("python_binary", inputs![":python_binary"]);
|
build.add_inputs("uv_binary", inputs![":uv_binary"]);
|
||||||
build.add_variable("pyenv_folder", self.folder);
|
|
||||||
build.add_inputs("base_requirements", &self.base_requirements_txt);
|
// Add --python flag to extra_args if PYTHON_BINARY is set
|
||||||
build.add_inputs("requirements", &self.requirements_txt);
|
let mut args = self.extra_args.to_string();
|
||||||
build.add_outputs_ext("pip", bin_path("pip"), true);
|
if let Ok(python_binary) = env::var("PYTHON_BINARY") {
|
||||||
|
args = format!("--python {python_binary} {args}");
|
||||||
|
}
|
||||||
|
build.add_variable("extra_args", args);
|
||||||
}
|
}
|
||||||
|
|
||||||
build.add_outputs_ext("bin", bin_path("python"), true);
|
build.add_outputs_ext("bin", bin_path("python"), true);
|
||||||
for binary in self.extra_binary_exports {
|
for binary in self.extra_binary_exports {
|
||||||
build.add_outputs_ext(*binary, bin_path(binary), true);
|
build.add_outputs_ext(*binary, bin_path(binary), true);
|
||||||
}
|
}
|
||||||
|
build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_output_timestamps(&self) -> bool {
|
fn check_output_timestamps(&self) -> bool {
|
||||||
|
@@ -154,31 +193,19 @@ impl BuildAction for PythonTypecheck {
 struct PythonFormat<'a> {
     pub inputs: &'a BuildInput,
     pub check_only: bool,
-    pub isort_ini: &'a BuildInput,
 }

 impl BuildAction for PythonFormat<'_> {
     fn command(&self) -> &str {
-        "$black -t py39 -q $check --color $in && $
-        $isort --color --settings-path $isort_ini $check $in"
+        "$ruff format $mode $in && $ruff check --select I --fix $in"
     }

     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         build.add_inputs("in", self.inputs);
-        build.add_inputs("black", inputs![":pyenv:black"]);
-        build.add_inputs("isort", inputs![":pyenv:isort"]);
+        build.add_inputs("ruff", inputs![":pyenv:ruff"]);

         let hash = simple_hash(self.inputs);
-        build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}");
-        build.add_inputs("isort_ini", self.isort_ini);
-        build.add_variable(
-            "check",
-            if self.check_only {
-                "--diff --check"
-            } else {
-                ""
-            },
-        );
+        build.add_variable("mode", if self.check_only { "--check" } else { "" });

         build.add_output_stamp(format!(
             "tests/python_format.{}.{hash}",
@@ -188,13 +215,11 @@ impl BuildAction for PythonFormat<'_> {
 }

 pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> {
-    let isort_ini = &inputs![".isort.cfg"];
     build.add_action(
         format!("check:format:python:{group}"),
         PythonFormat {
             inputs: &inputs,
             check_only: true,
-            isort_ini,
         },
     )?;

@@ -203,34 +228,39 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> {
         PythonFormat {
             inputs: &inputs,
             check_only: false,
-            isort_ini,
         },
     )?;
     Ok(())
 }

-pub struct PythonLint {
+pub struct RuffCheck {
     pub folders: &'static [&'static str],
-    pub pylint_ini: BuildInput,
     pub deps: BuildInput,
+    pub check_only: bool,
 }

-impl BuildAction for PythonLint {
+impl BuildAction for RuffCheck {
     fn command(&self) -> &str {
-        "$pylint --rcfile $pylint_ini -sn -j $cpus $folders"
+        "$ruff check $folders $mode"
     }

     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         build.add_inputs("", &self.deps);
-        build.add_inputs("pylint", inputs![":pyenv:pylint"]);
-        build.add_inputs("pylint_ini", &self.pylint_ini);
+        build.add_inputs("", inputs![".ruff.toml"]);
+        build.add_inputs("ruff", inputs![":pyenv:ruff"]);
         build.add_variable("folders", self.folders.join(" "));
-        // On a 16 core system, values above 10 do not improve wall clock time,
-        // but waste extra cores that could be working on other tests.
-        build.add_variable("cpus", num_cpus::get().min(10).to_string());
+        build.add_variable(
+            "mode",
+            if self.check_only {
+                ""
+            } else {
+                "--fix --unsafe-fixes"
+            },
+        );

         let hash = simple_hash(&self.deps);
-        build.add_output_stamp(format!("tests/python_lint.{hash}"));
+        let kind = if self.check_only { "check" } else { "fix" };
+        build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}"));
     }
 }
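The `RuffCheck` action expands to plain `ruff check` invocations. A rough sketch of the command lines it generates, assuming ruff discovers the repo's `.ruff.toml` on its own:

```rust
fn ruff_command(folders: &[&str], check_only: bool) -> String {
    // In fix mode the action passes --fix --unsafe-fixes, matching the diff;
    // in check mode ruff only reports problems.
    let mode = if check_only { "" } else { " --fix --unsafe-fixes" };
    format!("ruff check {}{mode}", folders.join(" "))
}

fn main() {
    assert_eq!(ruff_command(&["pylib", "qt"], true), "ruff check pylib qt");
    assert_eq!(
        ruff_command(&["pylib", "qt"], false),
        "ruff check pylib qt --fix --unsafe-fixes"
    );
}
```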
@@ -30,12 +30,12 @@ impl Build {
         )
         .unwrap();
         for (key, value) in &self.variables {
-            writeln!(&mut buf, "{} = {}", key, value).unwrap();
+            writeln!(&mut buf, "{key} = {value}").unwrap();
         }
         buf.push('\n');

         for (key, value) in &self.pools {
-            writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap();
+            writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap();
         }
         buf.push('\n');
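Several hunks in this commit switch to inlined format arguments (Rust 2021 style); the two forms are equivalent:

```rust
fn main() {
    let (key, value) = ("jobs", 4);
    // Captured identifiers produce the same output as positional arguments.
    assert_eq!(format!("{key} = {value}"), format!("{} = {}", key, value));
}
```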
|
@ -15,7 +15,6 @@ camino.workspace = true
|
||||||
clap.workspace = true
|
clap.workspace = true
|
||||||
flate2.workspace = true
|
flate2.workspace = true
|
||||||
junction.workspace = true
|
junction.workspace = true
|
||||||
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
|
|
||||||
sha2.workspace = true
|
sha2.workspace = true
|
||||||
tar.workspace = true
|
tar.workspace = true
|
||||||
termcolor.workspace = true
|
termcolor.workspace = true
|
||||||
|
@ -24,3 +23,9 @@ which.workspace = true
|
||||||
xz2.workspace = true
|
xz2.workspace = true
|
||||||
zip.workspace = true
|
zip.workspace = true
|
||||||
zstd.workspace = true
|
zstd.workspace = true
|
||||||
|
|
||||||
|
[target.'cfg(windows)'.dependencies]
|
||||||
|
reqwest = { workspace = true, features = ["native-tls"] }
|
||||||
|
|
||||||
|
[target.'cfg(not(windows))'.dependencies]
|
||||||
|
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
|
||||||
|
|
|
@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String {
|
||||||
let mut digest = sha2::Sha256::new();
|
let mut digest = sha2::Sha256::new();
|
||||||
digest.update(data);
|
digest.update(data);
|
||||||
let result = digest.finalize();
|
let result = digest.finalize();
|
||||||
format!("{:x}", result)
|
format!("{result:x}")
|
||||||
}
|
}
|
||||||
|
|
||||||
enum CompressionKind {
|
enum CompressionKind {
|
||||||
|
|
|
@@ -67,7 +67,10 @@ pub fn run_build(args: BuildArgs) {
             "MYPY_CACHE_DIR",
             build_root.join("tests").join("mypy").into_string(),
         )
-        .env("PYTHONPYCACHEPREFIX", build_root.join("pycache"))
+        .env(
+            "PYTHONPYCACHEPREFIX",
+            std::path::absolute(build_root.join("pycache")).unwrap(),
+        )
         // commands will not show colors by default, as we do not provide a tty
         .env("FORCE_COLOR", "1")
         .env("MYPY_FORCE_COLOR", "1")
@@ -135,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf {
         true
     };
     if create {
-        println!("Switching build root to {}", new_target);
+        println!("Switching build root to {new_target}");
         std::os::unix::fs::symlink(new_target, build_root).unwrap();
     }
 }
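`std::path::absolute` (stable since Rust 1.79) resolves a relative path against the current directory without requiring the path to exist, unlike `canonicalize()` — useful here because the pycache folder may not have been created yet. A small sketch with an illustrative path:

```rust
fn main() -> std::io::Result<()> {
    // Unlike fs::canonicalize, this does not touch the filesystem, so it
    // works for folders that will only be created later.
    let prefix = std::path::absolute("out/pycache")?;
    println!("PYTHONPYCACHEPREFIX={}", prefix.display());
    Ok(())
}
```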
@@ -1,62 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-use std::env;
-use std::fs;
-use std::process::Command;
-
-use camino::Utf8PathBuf;
-use clap::Args;
-
-use crate::run::run_command;
-
-#[derive(Args, Debug)]
-pub struct BuildArtifactsArgs {
-    bundle_root: Utf8PathBuf,
-    pyoxidizer_bin: String,
-}
-
-pub fn build_artifacts(args: BuildArtifactsArgs) {
-    // build.rs doesn't declare inputs from venv, so we need to force a rebuild to
-    // ensure changes to our libs/the venv get included
-    let artifacts = args.bundle_root.join("artifacts");
-    if artifacts.exists() {
-        fs::remove_dir_all(&artifacts).unwrap();
-    }
-    let bundle_root = args.bundle_root.canonicalize_utf8().unwrap();
-    let build_folder = bundle_root.join("build");
-    if build_folder.exists() {
-        fs::remove_dir_all(&build_folder).unwrap();
-    }
-
-    run_command(
-        Command::new(&args.pyoxidizer_bin)
-            .args([
-                "--system-rust",
-                "run-build-script",
-                "qt/bundle/build.rs",
-                "--var",
-                "venv",
-                "out/bundle/pyenv",
-                "--var",
-                "build",
-                build_folder.as_str(),
-            ])
-            .env("CARGO_MANIFEST_DIR", "qt/bundle")
-            .env("CARGO_TARGET_DIR", "out/bundle/rust")
-            .env("PROFILE", "release")
-            .env("OUT_DIR", &artifacts)
-            .env("TARGET", env!("TARGET"))
-            .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
-            .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
-            .env("CARGO_BUILD_TARGET", env!("TARGET")),
-    );
-}
-
-pub fn macos_deployment_target() -> &'static str {
-    if env!("TARGET") == "x86_64-apple-darwin" {
-        "10.13.4"
-    } else {
-        "11"
-    }
-}
@@ -1,53 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-use std::process::Command;
-
-use anki_process::CommandExt;
-use camino::Utf8Path;
-use camino::Utf8PathBuf;
-
-use super::artifacts::macos_deployment_target;
-use crate::run::run_command;
-
-pub fn build_bundle_binary() {
-    let mut features = String::from("build-mode-prebuilt-artifacts");
-    if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
-        features.push_str(",global-allocator-jemalloc,allocator-jemalloc");
-    }
-
-    let mut command = Command::new("cargo");
-    command
-        .args([
-            "build",
-            "--manifest-path=qt/bundle/Cargo.toml",
-            "--target-dir=out/bundle/rust",
-            "--release",
-            "--no-default-features",
-        ])
-        .arg(format!("--features={features}"))
-        .env(
-            "DEFAULT_PYTHON_CONFIG_RS",
-            // included in main.rs, so relative to qt/bundle/src
-            "../../../out/bundle/artifacts/",
-        )
-        .env(
-            "PYO3_CONFIG_FILE",
-            Utf8Path::new("out/bundle/artifacts/pyo3-build-config-file.txt")
-                .canonicalize_utf8()
-                .unwrap(),
-        )
-        .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
-        .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
-        .env("CARGO_BUILD_TARGET", env!("TARGET"));
-    if env!("TARGET") == "x86_64-apple-darwin" {
-        let xcode_path = Command::run_with_output(["xcode-select", "-p"]).unwrap();
-        let ld_classic = Utf8PathBuf::from(xcode_path.stdout.trim())
-            .join("Toolchains/XcodeDefault.xctoolchain/usr/bin/ld-classic");
-        if ld_classic.exists() {
-            // work around XCode 15's default linker not supporting macOS 10.15-12.
-            command.env("RUSTFLAGS", format!("-Clink-arg=-fuse-ld={ld_classic}"));
-        }
-    }
-    run_command(&mut command);
-}
@@ -1,156 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-use std::env;
-use std::fs;
-use std::process::Command;
-
-use camino::Utf8Path;
-use camino::Utf8PathBuf;
-use clap::Args;
-use clap::ValueEnum;
-
-use crate::paths::absolute_msys_path;
-use crate::paths::unix_path;
-use crate::run::run_command;
-
-#[derive(Clone, Copy, ValueEnum, Debug)]
-enum DistKind {
-    Standard,
-    Alternate,
-}
-
-#[derive(Args, Debug)]
-pub struct BuildDistFolderArgs {
-    kind: DistKind,
-    folder_root: Utf8PathBuf,
-}
-
-pub fn build_dist_folder(args: BuildDistFolderArgs) {
-    let BuildDistFolderArgs { kind, folder_root } = args;
-    fs::create_dir_all(&folder_root).unwrap();
-    // Start with Qt, as it's the largest, and we use --delete to ensure there are
-    // no stale files in lib/. Skipped on macOS as Qt is handled later.
-    if !cfg!(target_os = "macos") {
-        copy_qt_from_venv(kind, &folder_root);
-    }
-    clean_top_level_files(&folder_root);
-    copy_binary_and_pylibs(&folder_root);
-    if cfg!(target_os = "linux") {
-        copy_linux_extras(kind, &folder_root);
-    } else if cfg!(windows) {
-        copy_windows_extras(&folder_root);
-    }
-    fs::write(folder_root.with_extension("stamp"), b"").unwrap();
-}
-
-fn copy_qt_from_venv(kind: DistKind, folder_root: &Utf8Path) {
-    let python39 = if cfg!(windows) { "" } else { "python3.9/" };
-    let qt_root = match kind {
-        DistKind::Standard => {
-            folder_root.join(format!("../pyenv/lib/{python39}site-packages/PyQt6"))
-        }
-        DistKind::Alternate => {
-            folder_root.join(format!("../pyenv-qt5/lib/{python39}site-packages/PyQt5"))
-        }
-    };
-    let src_path = absolute_msys_path(&qt_root);
-    let lib_path = folder_root.join("lib");
-    fs::create_dir_all(&lib_path).unwrap();
-    let dst_path = with_slash(absolute_msys_path(&lib_path));
-    run_command(Command::new("rsync").args([
-        "-a",
-        "--delete",
-        "--exclude-from",
-        "qt/bundle/qt.exclude",
-        &src_path,
-        &dst_path,
-    ]));
-}
-
-fn copy_linux_extras(kind: DistKind, folder_root: &Utf8Path) {
-    // add README, installer, etc
-    run_command(Command::new("rsync").args(["-a", "qt/bundle/lin/", &with_slash(folder_root)]));
-
-    // add extra IME plugins from download
-    let lib_path = folder_root.join("lib");
-    let src_path = folder_root
-        .join("../../extracted/linux_qt_plugins")
-        .join(match kind {
-            DistKind::Standard => "qt6",
-            DistKind::Alternate => "qt5",
-        });
-    let dst_path = lib_path.join(match kind {
-        DistKind::Standard => "PyQt6/Qt6/plugins",
-        DistKind::Alternate => "PyQt5/Qt5/plugins",
-    });
-    run_command(Command::new("rsync").args(["-a", &with_slash(src_path), &with_slash(dst_path)]));
-}
-
-fn copy_windows_extras(folder_root: &Utf8Path) {
-    run_command(Command::new("rsync").args([
-        "-a",
-        "out/extracted/win_amd64_audio/",
-        &with_slash(folder_root),
-    ]));
-}
-
-fn clean_top_level_files(folder_root: &Utf8Path) {
-    let mut to_remove = vec![];
-    for entry in fs::read_dir(folder_root).unwrap() {
-        let entry = entry.unwrap();
-        if entry.file_name() == "lib" {
-            continue;
-        } else {
-            to_remove.push(entry.path());
-        }
-    }
-    for path in to_remove {
-        if path.is_dir() {
-            fs::remove_dir_all(path).unwrap()
-        } else {
-            fs::remove_file(path).unwrap()
-        }
-    }
-}
-
-fn with_slash<P>(path: P) -> String
-where
-    P: AsRef<str>,
-{
-    format!("{}/", path.as_ref())
-}
-
-fn copy_binary_and_pylibs(folder_root: &Utf8Path) {
-    let binary = folder_root
-        .join("../rust")
-        .join(env!("TARGET"))
-        .join("release")
-        .join(if cfg!(windows) { "anki.exe" } else { "anki" });
-    let extra_files = folder_root
-        .join("../build")
-        .join(env!("TARGET"))
-        .join("release/resources/extra_files");
-    run_command(Command::new("rsync").args([
-        "-a",
-        "--exclude",
-        "PyQt6",
-        // misleading, as it misses the GPL PyQt, and our Rust/JS
-        // dependencies
-        "--exclude",
-        "COPYING.txt",
-        &unix_path(&binary),
-        &with_slash(unix_path(&extra_files)),
-        &with_slash(unix_path(folder_root)),
-    ]));
-    let google_py = if cfg!(windows) {
-        folder_root.join("../pyenv/lib/site-packages/google")
-    } else {
-        folder_root.join("../pyenv/lib/python3.9/site-packages/google")
-    };
-    run_command(Command::new("rsync").args([
-        "-a",
-        &unix_path(&google_py),
-        &with_slash(unix_path(&folder_root.join("lib"))),
-    ]));
-}
@@ -1,6 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-pub mod artifacts;
-pub mod binary;
-pub mod folder;
@@ -7,7 +7,6 @@
 mod archive;
 mod build;
-mod bundle;
 mod paths;
 mod pyenv;
 mod rsync;
@@ -19,11 +18,6 @@ use archive::archive_command;
 use archive::ArchiveArgs;
 use build::run_build;
 use build::BuildArgs;
-use bundle::artifacts::build_artifacts;
-use bundle::artifacts::BuildArtifactsArgs;
-use bundle::binary::build_bundle_binary;
-use bundle::folder::build_dist_folder;
-use bundle::folder::BuildDistFolderArgs;
 use clap::Parser;
 use clap::Subcommand;
 use pyenv::setup_pyenv;
@@ -48,9 +42,6 @@ enum Command {
     Rsync(RsyncArgs),
     Run(RunArgs),
     Build(BuildArgs),
-    BuildArtifacts(BuildArtifactsArgs),
-    BuildBundleBinary,
-    BuildDistFolder(BuildDistFolderArgs),
     #[clap(subcommand)]
     Archive(ArchiveArgs),
 }
@@ -62,9 +53,6 @@ fn main() -> Result<()> {
         Command::Rsync(args) => rsync_files(args),
         Command::Yarn(args) => setup_yarn(args),
         Command::Build(args) => run_build(args),
-        Command::BuildArtifacts(args) => build_artifacts(args),
-        Command::BuildBundleBinary => build_bundle_binary(),
-        Command::BuildDistFolder(args) => build_dist_folder(args),
         Command::Archive(args) => archive_command(args)?,
     };
     Ok(())
@@ -16,8 +16,3 @@ pub fn absolute_msys_path(path: &Utf8Path) -> String {
     // and \ -> /
     format!("/{drive}/{}", path[7..].replace('\\', "/"))
 }
-
-/// Converts backslashes to forward slashes
-pub fn unix_path(path: &Utf8Path) -> String {
-    path.as_str().replace('\\', "/")
-}
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use std::fs;
 use std::process::Command;

 use camino::Utf8Path;
@@ -10,12 +11,10 @@ use crate::run::run_command;

 #[derive(Args)]
 pub struct PyenvArgs {
-    python_bin: String,
+    uv_bin: String,
     pyenv_folder: String,
-    initial_reqs: String,
-    reqs: Vec<String>,
-    #[arg(long, allow_hyphen_values(true))]
-    venv_args: Vec<String>,
+    #[arg(trailing_var_arg = true)]
+    extra_args: Vec<String>,
 }

 /// Set up a venv if one doesn't already exist, and then sync packages with
@@ -23,42 +22,23 @@ pub struct PyenvArgs {
 pub fn setup_pyenv(args: PyenvArgs) {
     let pyenv_folder = Utf8Path::new(&args.pyenv_folder);

-    let pyenv_bin_folder = pyenv_folder.join(if cfg!(windows) { "scripts" } else { "bin" });
-    let pyenv_python = pyenv_bin_folder.join("python");
-    let pip_sync = pyenv_bin_folder.join("pip-sync");
-
-    // Ensure the venv gets recreated properly if it was created by our uv branch
-    let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
-    if cache_tag.exists() {
-        println!("Cleaning up uv pyenv...");
-        std::fs::remove_dir_all(pyenv_folder).expect("Failed to remove pyenv folder");
-    }
-
-    if !pyenv_python.exists() {
-        run_command(
-            Command::new(&args.python_bin)
-                .args(["-m", "venv"])
-                .args(args.venv_args)
-                .arg(pyenv_folder),
-        );
-
-        if cfg!(windows) {
-            // the first install on Windows throws an error the first time pip is upgraded,
-            // so we install it twice and swallow the first error
-            let _output = Command::new(&pyenv_python)
-                .args(["-m", "pip", "install", "-r", &args.initial_reqs])
-                .output()
-                .unwrap();
+    // On first run, ninja creates an empty bin/ folder which breaks the initial
+    // install. But we don't want to indiscriminately remove the folder, or
+    // macOS Gatekeeper needs to rescan the files each time.
+    if pyenv_folder.exists() {
+        let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
+        if !cache_tag.exists() {
+            fs::remove_dir_all(pyenv_folder).expect("Failed to remove existing pyenv folder");
         }
-
-        run_command(Command::new(pyenv_python).args([
-            "-m",
-            "pip",
-            "install",
-            "-r",
-            &args.initial_reqs,
-        ]));
     }

-    run_command(Command::new(pip_sync).args(&args.reqs));
+    run_command(
+        Command::new(args.uv_bin)
+            .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
+            .args(["sync", "--locked"])
+            .args(args.extra_args),
+    );
+
+    // Write empty stamp file
+    fs::write(pyenv_folder.join(".stamp"), "").expect("Failed to write stamp file");
 }
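The rewritten `setup_pyenv` delegates both venv creation and package syncing to a single `uv sync --locked` call, pointed at the build's environment via `UV_PROJECT_ENVIRONMENT`. A rough standalone equivalent (the `out/pyenv` path is illustrative):

```rust
use std::process::Command;

fn main() {
    // uv creates the environment if missing and syncs it to the lockfile in
    // one step; --locked refuses to proceed if the lockfile is out of date.
    let status = Command::new("uv")
        .env("UV_PROJECT_ENVIRONMENT", "out/pyenv")
        .args(["sync", "--locked"])
        .status()
        .expect("failed to spawn uv");
    assert!(status.success());
}
```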
@@ -1,7 +1,6 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use std::io::ErrorKind;
 use std::process::Command;

 use anki_io::create_dir_all;
@@ -44,7 +43,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
     if let Some((k, v)) = s.split_once('=') {
         Ok((k.into(), v.into()))
     } else {
-        Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
+        Err(std::io::Error::other("invalid env var"))
     }
 }

@@ -84,7 +83,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {

 pub fn run_command(command: &mut Command) {
     if let Err(err) = command.ensure_success() {
-        println!("{}", err);
+        println!("{err}");
         std::process::exit(1);
     }
 }
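`io::Error::other` has been stable since Rust 1.74 and is shorthand for `io::Error::new(io::ErrorKind::Other, ...)`:

```rust
use std::io;

fn main() {
    let a = io::Error::other("invalid env var");
    let b = io::Error::new(io::ErrorKind::Other, "invalid env var");
    // Same kind, same message; only the spelling differs.
    assert_eq!(a.kind(), b.kind());
    assert_eq!(a.to_string(), b.to_string());
}
```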
File diff suppressed because it is too large
@@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with
 ./ninja format
 ```

-## Fixing eslint/copyright header issues
+## Fixing ruff/eslint/copyright header issues

 ```
 ./ninja fix
@@ -190,13 +190,10 @@ in the collection2.log file will also be printed on stdout.

 If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit.

-# Binary Bundles
+# Installer/launcher

-Anki's official binary packages are created with `./ninja bundle`. The bundling
-process was created specifically for the official builds, and is provided as-is;
-we are unfortunately not able to provide assistance with any issues you may run
-into when using it. You'll need to run
-`git submodule update --checkout qt/bundle/PyOxidizer` first.
+- The anki-release package is created/published with the scripts in qt/release.
+- The installer/launcher is created with the build scripts in qt/launcher/{platform}.

 ## Mixing development and study
@@ -51,13 +51,8 @@ Anki requires a recent glibc.

 If you are using a distro that uses musl, Anki will not work.

-If your glibc version is 2.35+ on AMD64 or 2.39+ on ARM64, you can skip the rest of this section.
-
-If your system has an older glibc, you won't be able to use the PyQt wheels that are
-available in pip/PyPy, and will need to use your system-installed PyQt instead.
-Your distro will also need to have Python 3.9 or later.
-
-After installing the system libraries (eg:
+You can use your system's Qt libraries if they are Qt 6.2 or later, if
+you wish. After installing the system libraries (eg:
 'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv pyqt6-dev-tools'),
 find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll
 also need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where
@@ -68,12 +63,6 @@ export PYTHONPATH=/usr/lib/python3/dist-packages
 export PYTHON_BINARY=/usr/bin/python3
 ```

-There are a few things to be aware of:
-
-- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5.
-- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work
-  on Qt6 environments.
-
 ## Packaging considerations

 Python, node and protoc are downloaded as part of the build. You can optionally define
@@ -98,12 +98,6 @@ should preferably be assigned a number between 1 and 15. If a message contains

 Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated).

-- Every message used in aqt or pylib must be added to the respective `.pylintrc`
-  to avoid failing type checks. The unqualified protobuf message's name must be
-  used, not an alias from `collection.py` for example. This should be taken into
-  account when choosing a message name in order to prevent skipping typechecking
-  a Python class of the same name.
-
 ### Typescript

 Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers
@@ -9,7 +9,12 @@ You must be running 64 bit Windows 10, version 1703 or newer.
 **Rustup**:

 As mentioned in development.md, rustup must be installed. If you're on
-ARM Windows, you must set the default target to x86_64-pc-windows-msvc.
+ARM Windows and install the ARM64 version of rust-up, from this project folder,
+run
+
+```
+rustup target add x86_64-pc-windows-msvc
+```

 **Visual Studio**:
@@ -1 +1 @@
-Subproject commit 78412ce163d4dc50dd82f5b27cde3119086a2eb7
+Subproject commit a9216499ba1fb1538cfd740c698adaaa3410fd4b
@@ -60,7 +60,6 @@ card-templates-this-will-create-card-proceed =
     }
 card-templates-type-boxes-warning = Only one typing box per card template is supported.
 card-templates-restore-to-default = Restore to Default
-card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default
-    values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
+card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
 card-templates-restored-to-default = Note type has been restored to its original state.
@@ -425,6 +425,8 @@ deck-config-desired-retention-tooltip =
     less frequently, and you will forget more of them. Be conservative when adjusting this - higher
     values will greatly increase your workload, and lower values can be demoralizing when you forget
     a lot of material.
+deck-config-desired-retention-tooltip2 =
+    The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
 deck-config-historical-retention-tooltip =
     When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
     assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@@ -65,7 +65,6 @@ importing-with-deck-configs-help =
     If enabled, any deck options that the deck sharer included will also be imported.
     Otherwise, all decks will be assigned the default preset.
 importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
-importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
 # the '|' character
 importing-pipe = Pipe
 # Warning displayed when the csv import preview table is clipped (some columns were hidden)
@@ -78,7 +77,6 @@ importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } field
 importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual.
 importing-semicolon = Semicolon
 importing-skipped = Skipped
-importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
 importing-tab = Tab
 importing-tag-modified-notes = Tag modified notes:
 importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*)
@@ -252,3 +250,5 @@ importing-importing-collection = Importing collection...
 importing-unable-to-import-filename = Unable to import { $filename }: file type not supported
 importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val }
 importing-added = Added
+importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
+importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
@@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
 statistics-counts-title = Card Counts
 statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards

-## True Retention represents your actual retention rate from past reviews, in
-## comparison to the "desired retention" parameter of FSRS, which forecasts
-## future retention. True Retention is the percentage of all reviewed cards
+## Retention rate represents your actual retention rate from past reviews, in
+## comparison to the "desired retention" setting of FSRS, which forecasts
+## future retention. Retention rate is the percentage of all reviewed cards
 ## that were marked as "Hard," "Good," or "Easy" within a specific time period.
 ##
 ## Most of these strings are used as column / row headings in a table.
@@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
 ## N.B. Stats cards may be very small on mobile devices and when the Stats
 ## window is certain sizes.

-statistics-true-retention-title = True Retention
+statistics-true-retention-title = Retention rate
 statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
-statistics-true-retention-tooltip = If you are using FSRS, your true retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
+statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
 statistics-true-retention-range = Range
 statistics-true-retention-pass = Pass
 statistics-true-retention-fail = Fail
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-# Copyright: Ankitects Pty Ltd and contributors
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-"""
-Tool to extract core strings and keys from .ftl files.
-"""
-
-import glob
-import json
-import os
-
-from fluent.syntax import parse
-from fluent.syntax.ast import Junk, Message
-from fluent.syntax.serializer import serialize_element
-
-root = ".."
-ftl_files = glob.glob(os.path.join(root, "ftl", "core", "*.ftl"), recursive=True)
-keys_by_value: dict[str, list[str]] = {}
-
-for path in ftl_files:
-    obj = parse(open(path, encoding="utf8").read(), with_spans=False)
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"file had junk! {path} {ent}")
-        if isinstance(ent, Message):
-            key = ent.id.name
-            val = "".join(serialize_element(elem) for elem in ent.value.elements)
-            if val in keys_by_value:
-                print("duplicate found:", keys_by_value[val], key)
-            keys_by_value.setdefault(val, []).append(key)
-
-json.dump(
-    keys_by_value, open(os.path.join(root, "keys_by_value.json"), "w", encoding="utf8")
-)
-print("keys:", len(keys_by_value))
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-# Copyright: Ankitects Pty Ltd and contributors
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-"""
-Parse and re-serialize ftl files to get them in a consistent form.
-"""
-
-import difflib
-import glob
-import os
-from typing import List
-
-from compare_locales import parser
-from compare_locales.checks.fluent import ReferenceMessageVisitor
-from compare_locales.paths import File
-from fluent.syntax import parse, serialize
-from fluent.syntax.ast import Junk
-
-
-def check_missing_terms(path: str) -> bool:
-    "True if file is ok."
-    file = File(path, os.path.basename(path))
-    content = open(path, "rb").read()
-    p = parser.getParser(file.file)
-    p.readContents(content)
-    refList = p.parse()
-
-    p.readContents(content)
-    for e in p.parse():
-        ref_data = ReferenceMessageVisitor()
-        ref_data.visit(e.entry)
-
-        for attr_or_val, refs in ref_data.entry_refs.items():
-            for ref, ref_type in refs.items():
-                if ref not in refList:
-                    print(f"In {path}:{e}, missing '{ref}'")
-                    return False
-
-    return True
-
-
-def check_file(path: str, fix: bool) -> bool:
-    "True if file is ok."
-    orig_text = open(path, encoding="utf8").read()
-    obj = parse(orig_text, with_spans=False)
-    # make sure there's no junk
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"file had junk! {path} {ent}")
-    # serialize
-    new_text = serialize(obj)
-    # make sure serializing did not introduce new junk
-    obj = parse(new_text, with_spans=False)
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"file introduced junk! {path} {ent}")
-
-    if new_text == orig_text:
-        return check_missing_terms(path)
-
-    if fix:
-        print(f"Fixing {path}")
-        open(path, "w", newline="\n", encoding="utf8").write(new_text)
-        return True
-    else:
-        print(f"Bad formatting in {path}")
-        print(
-            "\n".join(
-                difflib.unified_diff(
-                    orig_text.splitlines(),
-                    new_text.splitlines(),
-                    fromfile="bad",
-                    tofile="good",
-                    lineterm="",
-                )
-            )
-        )
-        return False
-
-
-def check_files(files: List[str], fix: bool) -> bool:
-    "True if files ok."
-
-    found_bad = False
-    for path in files:
-        ok = check_file(path, fix)
-        if not ok:
-            found_bad = True
-    return not found_bad
-
-
-if __name__ == "__main__":
-    template_root = os.environ["BUILD_WORKSPACE_DIRECTORY"]
-    template_files = glob.glob(
-        os.path.join(template_root, "ftl", "*", "*.ftl"), recursive=True
-    )
-
-    check_files(template_files, fix=True)
@@ -1,14 +0,0 @@
-# Copyright: Ankitects Pty Ltd and contributors
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-import glob
-import os
-import sys
-
-import format
-
-template_root = os.path.dirname(sys.argv[1])
-template_files = glob.glob(os.path.join(template_root, "*", "*.ftl"), recursive=True)
-
-if not format.check_files(template_files, fix=False):
-    sys.exit(1)
@@ -1 +1 @@
-Subproject commit fbe9d1c731f7ad09953e63fdb0c455a6d3a3b6be
+Subproject commit a1134ab59d3d23468af2968741aa1f21d16ff308
@@ -1,4 +1,5 @@
 qt-accel-about = &About
+qt-accel-about-mac = About Anki...
 qt-accel-cards = &Cards
 qt-accel-check-database = &Check Database
 qt-accel-check-media = Check &Media
@@ -45,3 +46,4 @@ qt-accel-zoom-editor-in = Zoom Editor &In
 qt-accel-zoom-editor-out = Zoom Editor &Out
 qt-accel-create-backup = Create &Backup
 qt-accel-load-backup = &Revert to Backup
+qt-accel-upgrade-downgrade = Upgrade/Downgrade
@@ -73,6 +73,7 @@ qt-misc-second =
 qt-misc-layout-auto-enabled = Responsive layout enabled
 qt-misc-layout-vertical-enabled = Vertical layout enabled
 qt-misc-layout-horizontal-enabled = Horizontal layout enabled
+qt-misc-open-anki-launcher = Change to a different Anki version?

 ## deprecated- these strings will be removed in the future, and do not need
 ## to be translated
@@ -435,7 +435,7 @@ impl TextWriter {
             item = item.trim_start_matches(' ');
         }

-        write!(self.buffer, "{}", item)
+        write!(self.buffer, "{item}")
     }

     fn write_char_into_indent(&mut self, ch: char) {
@@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option<Utf8PathBuf> {

 fn all_langs(lang_folder: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
     std::fs::read_dir(lang_folder)
-        .with_context(|| format!("reading {:?}", lang_folder))?
+        .with_context(|| format!("reading {lang_folder:?}"))?
         .filter_map(Result::ok)
         .map(|e| Ok(e.path().utf8()?))
         .collect()
ninja (2 lines changed)
@@ -8,7 +8,7 @@ else
     out="$BUILD_ROOT"
 fi
 export CARGO_TARGET_DIR=$out/rust
-export RECONFIGURE_KEY="${MAC_X86};${SOURCEMAP};${HMR}"
+export RECONFIGURE_KEY="${MAC_X86};${LIN_ARM64};${SOURCEMAP};${HMR}"

 if [ "$SKIP_RUNNER_BUILD" = "1" ]; then
     echo "Runner not rebuilt."
package.json (21 lines changed)
@@ -19,8 +19,8 @@
         "@poppanator/sveltekit-svg": "^5.0.0",
         "@sqltools/formatter": "^1.2.2",
         "@sveltejs/adapter-static": "^3.0.0",
-        "@sveltejs/kit": "^2.20.7",
-        "@sveltejs/vite-plugin-svelte": "4.0.0",
+        "@sveltejs/kit": "^2.22.2",
+        "@sveltejs/vite-plugin-svelte": "5.1",
         "@types/bootstrap": "^5.0.12",
         "@types/codemirror": "^5.60.0",
         "@types/d3": "^7.0.0",
@@ -30,7 +30,7 @@
         "@types/jqueryui": "^1.12.13",
         "@types/lodash-es": "^4.17.4",
         "@types/marked": "^5.0.0",
-        "@types/node": "^20",
+        "@types/node": "^22",
         "@typescript-eslint/eslint-plugin": "^5.60.1",
         "@typescript-eslint/parser": "^5.60.1",
         "caniuse-lite": "^1.0.30001431",
@@ -48,16 +48,16 @@
         "prettier": "^3.4.2",
         "prettier-plugin-svelte": "^3.3.2",
         "sass": "<1.77",
-        "svelte": "^5.17.3",
-        "svelte-check": "^3.4.4",
-        "svelte-preprocess": "^5.0.4",
+        "svelte": "^5.34.9",
+        "svelte-check": "^4.2.2",
+        "svelte-preprocess": "^6.0.3",
         "svelte-preprocess-esbuild": "^3.0.1",
         "svgo": "^3.2.0",
         "tslib": "^2.0.3",
-        "tsx": "^3.12.0",
+        "tsx": "^4.8.1",
         "typescript": "^5.0.4",
-        "vite": "5.4.19",
-        "vitest": "^2"
+        "vite": "6",
+        "vitest": "^3"
     },
     "dependencies": {
         "@bufbuild/protobuf": "^1.2.1",
@@ -81,7 +81,8 @@
     },
     "resolutions": {
         "canvas": "npm:empty-npm-package@1.0.0",
-        "cookie": "0.7.0"
+        "cookie": "0.7.0",
+        "vite": "6"
     },
     "browserslist": [
         "defaults",
@@ -56,6 +56,7 @@ message ConfigKey {
     RENDER_LATEX = 25;
     LOAD_BALANCER_ENABLED = 26;
     FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
+    FSRS_LEGACY_EVALUATE = 28;
   }
   enum String {
     SET_DUE_BROWSER = 0;
@@ -236,6 +236,7 @@ message DeckConfigsForUpdate {
   bool new_cards_ignore_review_limit = 7;
   bool fsrs = 8;
   bool fsrs_health_check = 11;
+  bool fsrs_legacy_evaluate = 12;
   bool apply_all_parent_limits = 9;
   uint32 days_since_last_fsrs_optimize = 10;
 }
@@ -56,6 +56,8 @@ service SchedulerService {
   rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
      returns (SimulateFsrsReviewResponse);
   rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
+  rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
+      returns (EvaluateParamsResponse);
   rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
   // The number of days the calculated interval was fuzzed by on the previous
   // review (if any). Utilized by the FSRS add-on.
@@ -442,6 +444,12 @@ message EvaluateParamsRequest {
   uint32 num_of_relearning_steps = 3;
 }

+message EvaluateParamsLegacyRequest {
+  repeated float params = 1;
+  string search = 2;
+  int64 ignore_revlogs_before_ms = 3;
+}
+
 message EvaluateParamsResponse {
   float log_loss = 1;
   float rmse_bins = 2;
@@ -450,6 +458,7 @@ message EvaluateParamsResponse {
 message ComputeMemoryStateResponse {
   optional cards.FsrsMemoryState state = 1;
   float desired_retention = 2;
+  float decay = 3;
 }

 message FuzzDeltaRequest {
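For reference, a prost-style Rust binding for the new legacy request would look roughly like the sketch below. This is a hand-written approximation only — the generated code's derives and attributes depend on the build setup — but the field types follow the usual proto3 mapping (float → f32, int64 → i64):

```rust
// Hypothetical mirror of the generated message, for illustration.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct EvaluateParamsLegacyRequest {
    pub params: Vec<f32>,
    pub search: String,
    pub ignore_revlogs_before_ms: i64,
}

fn main() {
    let req = EvaluateParamsLegacyRequest {
        search: "deck:current".into(),
        ..Default::default()
    };
    println!("{req:?}");
}
```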
@@ -46,7 +46,6 @@ from .errors import (

 # the following comment is required to suppress a warning that only shows up
 # when there are other pylint failures
-# pylint: disable=c-extension-no-member
 if _rsbridge.buildhash() != anki.buildinfo.buildhash:
     raise Exception(
         f"""rsbridge and anki build hashes do not match:
@@ -164,7 +163,7 @@ class RustBackend(RustBackendGenerated):
         finally:
             elapsed = time.time() - start
             if current_thread() is main_thread() and elapsed > 0.2:
-                print(f"blocked main thread for {int(elapsed*1000)}ms:")
+                print(f"blocked main thread for {int(elapsed * 1000)}ms:")
                 print("".join(traceback.format_stack()))

         err = backend_pb2.BackendError()
@@ -7,7 +7,7 @@ import pprint
 import time
 from typing import NewType

-import anki  # pylint: disable=unused-import
+import anki
 import anki.collection
 import anki.decks
 import anki.notes
@@ -122,6 +122,7 @@ class ComputedMemoryState:
     desired_retention: float
     stability: float | None = None
     difficulty: float | None = None
+    decay: float | None = None


 @dataclass
@@ -157,7 +158,7 @@ class Collection(DeprecatedNamesMixin):
         self.tags = TagManager(self)
         self.conf = ConfigManager(self)
         self._load_scheduler()
-        self._startReps = 0  # pylint: disable=invalid-name
+        self._startReps = 0

     def name(self) -> Any:
         return os.path.splitext(os.path.basename(self.path))[0]
@@ -510,9 +511,7 @@ class Collection(DeprecatedNamesMixin):
     # Utils
     ##########################################################################

-    def nextID(  # pylint: disable=invalid-name
-        self, type: str, inc: bool = True
-    ) -> Any:
+    def nextID(self, type: str, inc: bool = True) -> Any:
         type = f"next{type.capitalize()}"
         id = self.conf.get(type, 1)
         if inc:
@@ -848,7 +847,6 @@ class Collection(DeprecatedNamesMixin):
         )

     def _pb_search_separator(self, operator: SearchJoiner) -> SearchNode.Group.Joiner.V:
-        # pylint: disable=no-member
         if operator == "AND":
             return SearchNode.Group.Joiner.AND
         else:
@@ -866,9 +864,9 @@ class Collection(DeprecatedNamesMixin):
                 return column
         return None

-    def browser_row_for_id(self, id_: int) -> tuple[
+    def browser_row_for_id(
+        self, id_: int
+    ) -> tuple[
         Generator[tuple[str, bool, BrowserRow.Cell.TextElideMode.V], None, None],
         BrowserRow.Color.V,
         str,
@@ -1189,9 +1189,13 @@ class Collection(DeprecatedNamesMixin):
                 desired_retention=resp.desired_retention,
                 stability=resp.state.stability,
                 difficulty=resp.state.difficulty,
+                decay=resp.decay,
             )
         else:
-            return ComputedMemoryState(desired_retention=resp.desired_retention)
+            return ComputedMemoryState(
+                desired_retention=resp.desired_retention,
+                decay=resp.decay,
+            )

     def fuzz_delta(self, card_id: CardId, interval: int) -> int:
         "The delta days of fuzz applied if reviewing the card in v3."
@@ -1207,8 +1211,6 @@ class Collection(DeprecatedNamesMixin):
     # the count on things like edits, which we probably could do by checking
     # the previous state in moveToState.

-    # pylint: disable=invalid-name
-
     def startTimebox(self) -> None:
         self._startTime = time.time()
         self._startReps = self.sched.reps
@ -1,7 +1,6 @@
|
||||||
# Copyright: Ankitects Pty Ltd and contributors
|
# Copyright: Ankitects Pty Ltd and contributors
|
||||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||||
|
|
||||||
# pylint: disable=invalid-name
|
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
@ -351,7 +350,7 @@ class AnkiPackageExporter(AnkiExporter):
|
||||||
colfile = path.replace(".apkg", ".anki2")
|
colfile = path.replace(".apkg", ".anki2")
|
||||||
AnkiExporter.exportInto(self, colfile)
|
AnkiExporter.exportInto(self, colfile)
|
||||||
# prevent older clients from accessing
|
# prevent older clients from accessing
|
||||||
# pylint: disable=unreachable
|
|
||||||
self._addDummyCollection(z)
|
self._addDummyCollection(z)
|
||||||
z.write(colfile, "collection.anki21")
|
z.write(colfile, "collection.anki21")
|
||||||
|
|
||||||
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 from __future__ import annotations

@@ -1,8 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-"""Helpers for serializing third-party collections to a common JSON form.
-"""
+"""Helpers for serializing third-party collections to a common JSON form."""

 from __future__ import annotations

@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 """
 Tools for extending Anki.

@@ -11,8 +11,6 @@ from anki.importing.apkg import AnkiPackageImporter
 from anki.importing.base import Importer
 from anki.importing.csvfile import TextImporter
 from anki.importing.mnemo import MnemosyneImporter
-from anki.importing.pauker import PaukerImporter
-from anki.importing.supermemo_xml import SupermemoXmlImporter  # type: ignore
 from anki.lang import TR


@@ -24,8 +22,6 @@ def importers(col: Collection) -> Sequence[tuple[str, type[Importer]]]:
             AnkiPackageImporter,
         ),
         (col.tr.importing_mnemosyne_20_deck_db(), MnemosyneImporter),
-        (col.tr.importing_supermemo_xml_export_xml(), SupermemoXmlImporter),
-        (col.tr.importing_pauker_18_lesson_paugz(), PaukerImporter),
     ]
     anki.hooks.importing_importers(importers)
     return importers
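With the Pauker and SuperMemo XML entries gone from the built-in list, an add-on that still needs one of those formats would have to register its own importer through the `importing_importers` filter that `importers()` runs above. A hedged sketch (`MyImporter` and its label are hypothetical; check `anki.hooks` for the exact callback signature):

    import anki.hooks
    from anki.importing.noteimp import NoteImporter

    class MyImporter(NoteImporter):  # hypothetical add-on importer
        def fields(self) -> int:
            return 2

    def add_my_importer(importers):
        # filter callbacks receive the importer list and return the amended one
        return list(importers) + [("My format (*.my)", MyImporter)]

    anki.hooks.importing_importers.append(add_my_importer)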
@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name
 from __future__ import annotations

 import os
@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name
 from __future__ import annotations

 import json
@@ -1,7 +1,7 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name
 from __future__ import annotations

 from typing import Any
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 from __future__ import annotations

@@ -144,7 +143,6 @@ class TextImporter(NoteImporter):
         self.close()
         zuper = super()
         if hasattr(zuper, "__del__"):
-            # pylint: disable=no-member
             zuper.__del__(self)  # type: ignore

     def noteFromFields(self, fields: list[str]) -> ForeignNote:
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 import re
 import time

@@ -35,7 +34,6 @@ f._id=d._fact_id"""
         ):
             if id != curid:
                 if note:
-                    # pylint: disable=unsubscriptable-object
                     notes[note["_id"]] = note
                 note = {"_id": _id}
                 curid = id

@@ -185,7 +183,6 @@ acq_reps+ret_reps, lapses, card_type_id from cards"""
             state = dict(n=1)

             def repl(match):
-                # pylint: disable=cell-var-from-loop
                 # replace [...] with cloze refs
                 res = "{{c%d::%s}}" % (state["n"], match.group(1))
                 state["n"] += 1
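The `repl` closure above numbers cloze deletions by mutating the shared `state` dict defined just before it; that pattern is what the dropped `cell-var-from-loop` pragma used to silence. The same idea in isolation (the `[...]` pattern here is illustrative; the actual substitution call lives elsewhere in the file):

    import re

    def clozify(text: str) -> str:
        state = dict(n=1)

        def repl(match: re.Match) -> str:
            # replace [...] with cloze refs, numbered 1, 2, ...
            res = "{{c%d::%s}}" % (state["n"], match.group(1))
            state["n"] += 1
            return res

        return re.sub(r"\[(.+?)\]", repl, text)

    assert clozify("a [b] c [d]") == "a {{c1::b}} c {{c2::d}}"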
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 from __future__ import annotations

@@ -167,9 +166,9 @@ class NoteImporter(Importer):
                 firsts[fld0] = True
             # already exists?
             found = False
-            if csum in csums:
+            if csum in csums:  # type: ignore[comparison-overlap]
                 # csum is not a guarantee; have to check
-                for id in csums[csum]:
+                for id in csums[csum]:  # type: ignore[index]
                     flds = self.col.db.scalar("select flds from notes where id = ?", id)
                     sflds = split_fields(flds)
                     if fld0 == sflds[0]:
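Both suppressions added above are scoped to a single mypy error code rather than a blanket `# type: ignore`, so unrelated mistakes on the same lines still get reported. The pattern, reduced to a toy case (under mypy's strict-equality checking):

    value = 1
    if value in {"a", "b"}:  # type: ignore[comparison-overlap]
        # only the int-vs-str overlap error is silenced; others still surface
        ...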
@@ -1,94 +0,0 @@
-# Copyright: Andreas Klauer <Andreas.Klauer@metamorpher.de>
-# License: BSD-3
-
-# pylint: disable=invalid-name
-
-import gzip
-import html
-import math
-import random
-import time
-import xml.etree.ElementTree as ET
-
-from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter
-from anki.stdmodels import _legacy_add_forward_reverse
-
-ONE_DAY = 60 * 60 * 24
-
-
-class PaukerImporter(NoteImporter):
-    """Import Pauker 1.8 Lesson (*.pau.gz)"""
-
-    needMapper = False
-    allowHTML = True
-
-    def run(self):
-        model = _legacy_add_forward_reverse(self.col)
-        model["name"] = "Pauker"
-        self.col.models.save(model, updateReqs=False)
-        self.col.models.set_current(model)
-        self.model = model
-        self.initMapping()
-        NoteImporter.run(self)
-
-    def fields(self):
-        """Pauker is Front/Back"""
-        return 2
-
-    def foreignNotes(self):
-        """Build and return a list of notes."""
-        notes = []
-
-        try:
-            f = gzip.open(self.file)
-            tree = ET.parse(f)  # type: ignore
-            lesson = tree.getroot()
-            assert lesson.tag == "Lesson"
-        finally:
-            f.close()
-
-        index = -4
-
-        for batch in lesson.findall("./Batch"):
-            index += 1
-
-            for card in batch.findall("./Card"):
-                # Create a note for this card.
-                front = card.findtext("./FrontSide/Text")
-                back = card.findtext("./ReverseSide/Text")
-                note = ForeignNote()
-                assert front and back
-                note.fields = [
-                    html.escape(x.strip())
-                    .replace("\n", "<br>")
-                    .replace("  ", " &nbsp;")
-                    for x in [front, back]
-                ]
-                notes.append(note)
-
-                # Determine due date for cards.
-                frontdue = card.find("./FrontSide[@LearnedTimestamp]")
-                backdue = card.find("./ReverseSide[@Batch][@LearnedTimestamp]")
-
-                if frontdue is not None:
-                    note.cards[0] = self._learnedCard(
-                        index, int(frontdue.attrib["LearnedTimestamp"])
-                    )
-
-                if backdue is not None:
-                    note.cards[1] = self._learnedCard(
-                        int(backdue.attrib["Batch"]),
-                        int(backdue.attrib["LearnedTimestamp"]),
-                    )
-
-        return notes
-
-    def _learnedCard(self, batch, timestamp):
-        ivl = math.exp(batch)
-        now = time.time()
-        due = ivl - (now - timestamp / 1000.0) / ONE_DAY
-        fc = ForeignCard()
-        fc.due = self.col.sched.today + int(due + 0.5)
-        fc.ivl = random.randint(int(ivl * 0.90), int(ivl + 0.5))
-        fc.factor = random.randint(1500, 2500)
-        return fc
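For the record, the scheduling conversion in the deleted Pauker importer is compact: a card in batch n gets an interval of e^n days, and its due date is that interval minus the days already elapsed since the learned timestamp (which Pauker stores in milliseconds). The same arithmetic as a standalone sketch, with `today` standing in for `self.col.sched.today`:

    import math
    import random
    import time

    ONE_DAY = 60 * 60 * 24

    def learned_card(batch: int, learned_ms: int, today: int) -> tuple[int, int]:
        """Return (due, ivl) the way the removed _learnedCard computed them."""
        ivl = math.exp(batch)  # batch index -> interval in days
        due = ivl - (time.time() - learned_ms / 1000.0) / ONE_DAY
        fuzzed_ivl = random.randint(int(ivl * 0.90), int(ivl + 0.5))
        return today + int(due + 0.5), fuzzed_ivl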
@@ -1,484 +0,0 @@
-# Copyright: petr.michalec@gmail.com
-# License: GNU GPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-# pytype: disable=attribute-error
-# type: ignore
-# pylint: disable=C
-
-from __future__ import annotations
-
-import re
-import sys
-import time
-import unicodedata
-from string import capwords
-from xml.dom import minidom
-from xml.dom.minidom import Element, Text
-
-from anki.collection import Collection
-from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter
-from anki.stdmodels import _legacy_add_basic_model
-
-
-class SmartDict(dict):
-    """
-    See http://www.peterbe.com/plog/SmartDict
-    Copyright 2005, Peter Bengtsson, peter@fry-it.com
-    0BSD
-
-    A smart dict can be instantiated either from a pythonic dict
-    or an instance object (eg. SQL recordsets) but it ensures that you can
-    do all the convenient lookups such as x.first_name, x['first_name'] or
-    x.get('first_name').
-    """
-
-    def __init__(self, *a, **kw) -> None:
-        if a:
-            if isinstance(type(a[0]), dict):
-                kw.update(a[0])
-            elif isinstance(type(a[0]), object):
-                kw.update(a[0].__dict__)
-            elif hasattr(a[0], "__class__") and a[0].__class__.__name__ == "SmartDict":
-                kw.update(a[0].__dict__)
-
-        dict.__init__(self, **kw)
-        self.__dict__ = self
-
-
-class SuperMemoElement(SmartDict):
-    "SmartDict wrapper to store SM Element data"
-
-    def __init__(self, *a, **kw) -> None:
-        SmartDict.__init__(self, *a, **kw)
-        # default content
-        self.__dict__["lTitle"] = None
-        self.__dict__["Title"] = None
-        self.__dict__["Question"] = None
-        self.__dict__["Answer"] = None
-        self.__dict__["Count"] = None
-        self.__dict__["Type"] = None
-        self.__dict__["ID"] = None
-        self.__dict__["Interval"] = None
-        self.__dict__["Lapses"] = None
-        self.__dict__["Repetitions"] = None
-        self.__dict__["LastRepetiton"] = None
-        self.__dict__["AFactor"] = None
-        self.__dict__["UFactor"] = None
-
-
-# This is an AnkiImporter
-class SupermemoXmlImporter(NoteImporter):
-    needMapper = False
-    allowHTML = True
-
-    """
-    Supermemo XML export's to Anki parser.
-    Goes through a SM collection and fetch all elements.
-
-    My SM collection was a big mess where topics and items were mixed.
-    I was unable to parse my content in a regular way like for loop on
-    minidom.getElementsByTagName() etc. My collection had also an
-    limitation, topics were splited into branches with max 100 items
-    on each. Learning themes were in deep structure. I wanted to have
-    full title on each element to be stored in tags.
-
-    Code should be upgrade to support importing of SM2006 exports.
-    """
-
-    def __init__(self, col: Collection, file: str) -> None:
-        """Initialize internal variables.
-        Pameters to be exposed to GUI are stored in self.META"""
-        NoteImporter.__init__(self, col, file)
-        m = _legacy_add_basic_model(self.col)
-        m["name"] = "Supermemo"
-        self.col.models.save(m)
-        self.initMapping()
-
-        self.lines = None
-        self.numFields = int(2)
-
-        # SmXmlParse VARIABLES
-        self.xmldoc = None
-        self.pieces = []
-        self.cntBuf = []  # to store last parsed data
-        self.cntElm = []  # to store SM Elements data
-        self.cntCol = []  # to store SM Colections data
-
-        # store some meta info related to parse algorithm
-        # SmartDict works like dict / class wrapper
-        self.cntMeta = SmartDict()
-        self.cntMeta.popTitles = False
-        self.cntMeta.title = []
-
-        # META stores controls of import script, should be
-        # exposed to import dialog. These are default values.
-        self.META = SmartDict()
-        self.META.resetLearningData = False  # implemented
-        self.META.onlyMemorizedItems = False  # implemented
-        self.META.loggerLevel = 2  # implemented 0no,1info,2error,3debug
-        self.META.tagAllTopics = True
-        self.META.pathsToBeTagged = [
-            "English for beginners",
-            "Advanced English 97",
-            "Phrasal Verbs",
-        ]  # path patterns to be tagged - in gui entered like 'Advanced English 97|My Vocablary'
-        self.META.tagMemorizedItems = True  # implemented
-        self.META.logToStdOutput = False  # implemented
-
-        self.notes = []
-
-    ## TOOLS
-
-    def _fudgeText(self, text: str) -> str:
-        "Replace sm syntax to Anki syntax"
-        text = text.replace("\n\r", "<br>")
-        text = text.replace("\n", "<br>")
-        return text
-
-    def _unicode2ascii(self, str: str) -> str:
-        "Remove diacritic punctuation from strings (titles)"
-        return "".join(
-            [
-                c
-                for c in unicodedata.normalize("NFKD", str)
-                if not unicodedata.combining(c)
-            ]
-        )
-
-    def _decode_htmlescapes(self, html: str) -> str:
-        """Unescape HTML code."""
-        # In case of bad formatted html you can import MinimalSoup etc.. see BeautifulSoup source code
-        from bs4 import BeautifulSoup
-
-        # my sm2004 also ecaped & char in escaped sequences.
-        html = re.sub("&amp;", "&", html)
-
-        # https://anki.tenderapp.com/discussions/ankidesktop/39543-anki-is-replacing-the-character-by-when-i-exit-the-html-edit-mode-ctrlshiftx
-        if html.find(">") < 0:
-            return html
-
-        # unescaped solitary chars < or > that were ok for minidom confuse btfl soup
-        # html = re.sub(u'>',u'&gt;',html)
-        # html = re.sub(u'<',u'&lt;',html)
-
-        return str(BeautifulSoup(html, "html.parser"))
-
-    def _afactor2efactor(self, af: float) -> float:
-        # Adapted from <http://www.supermemo.com/beta/xml/xml-core.htm>
-
-        # Ranges for A-factors and E-factors
-        af_min = 1.2
-        af_max = 6.9
-        ef_min = 1.3
-        ef_max = 3.3
-
-        # Sanity checks for the A-factor
-        if af < af_min:
-            af = af_min
-        elif af > af_max:
-            af = af_max
-
-        # Scale af to the range 0..1
-        af_scaled = (af - af_min) / (af_max - af_min)
-        # Rescale to the interval ef_min..ef_max
-        ef = ef_min + af_scaled * (ef_max - ef_min)
-
-        return ef
-
-    ## DEFAULT IMPORTER METHODS
-
-    def foreignNotes(self) -> list[ForeignNote]:
-        # Load file and parse it by minidom
-        self.loadSource(self.file)
-
-        # Migrating content / time consuming part
-        # addItemToCards is called for each sm element
-        self.logger("Parsing started.")
-        self.parse()
-        self.logger("Parsing done.")
-
-        # Return imported cards
-        self.total = len(self.notes)
-        self.log.append("%d cards imported." % self.total)
-        return self.notes
-
-    def fields(self) -> int:
-        return 2
-
-    ## PARSER METHODS
-
-    def addItemToCards(self, item: SuperMemoElement) -> None:
-        "This method actually do conversion"
-
-        # new anki card
-        note = ForeignNote()
-
-        # clean Q and A
-        note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Question)))
-        note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Answer)))
-        note.tags = []
-
-        # pre-process scheduling data
-        # convert learning data
-        if (
-            not self.META.resetLearningData
-            and int(item.Interval) >= 1
-            and getattr(item, "LastRepetition", None)
-        ):
-            # migration of LearningData algorithm
-            tLastrep = time.mktime(time.strptime(item.LastRepetition, "%d.%m.%Y"))
-            tToday = time.time()
-            card = ForeignCard()
-            card.ivl = int(item.Interval)
-            card.lapses = int(item.Lapses)
-            card.reps = int(item.Repetitions) + int(item.Lapses)
-            nextDue = tLastrep + (float(item.Interval) * 86400.0)
-            remDays = int((nextDue - time.time()) / 86400)
-            card.due = self.col.sched.today + remDays
-            card.factor = int(
-                self._afactor2efactor(float(item.AFactor.replace(",", "."))) * 1000
-            )
-            note.cards[0] = card
-
-        # categories & tags
-        # it's worth to have every theme (tree structure of sm collection) stored in tags, but sometimes not
-        # you can deceide if you are going to tag all toppics or just that containing some pattern
-        tTaggTitle = False
-        for pattern in self.META.pathsToBeTagged:
-            if (
-                item.lTitle is not None
-                and pattern.lower() in " ".join(item.lTitle).lower()
-            ):
-                tTaggTitle = True
-                break
-        if tTaggTitle or self.META.tagAllTopics:
-            # normalize - remove diacritic punctuation from unicode chars to ascii
-            item.lTitle = [self._unicode2ascii(topic) for topic in item.lTitle]
-
-            # Transform xyz / aaa / bbb / ccc on Title path to Tag xyzAaaBbbCcc
-            # clean things like [999] or [111-2222] from title path, example: xyz / [1000-1200] zyx / xyz
-            # clean whitespaces
-            # set Capital letters for first char of the word
-            tmp = list(
-                {re.sub(r"(\[[0-9]+\])", " ", i).replace("_", " ") for i in item.lTitle}
-            )
-            tmp = list({re.sub(r"(\W)", " ", i) for i in tmp})
-            tmp = list({re.sub("^[0-9 ]+$", "", i) for i in tmp})
-            tmp = list({capwords(i).replace(" ", "") for i in tmp})
-            tags = [j[0].lower() + j[1:] for j in tmp if j.strip() != ""]
-
-            note.tags += tags
-
-            if self.META.tagMemorizedItems and int(item.Interval) > 0:
-                note.tags.append("Memorized")
-
-            self.logger("Element tags\t- " + repr(note.tags), level=3)
-
-        self.notes.append(note)
-
-    def logger(self, text: str, level: int = 1) -> None:
-        "Wrapper for Anki logger"
-
-        dLevels = {0: "", 1: "Info", 2: "Verbose", 3: "Debug"}
-        if level <= self.META.loggerLevel:
-            # self.deck.updateProgress(_(text))
-
-            if self.META.logToStdOutput:
-                print(
-                    self.__class__.__name__
-                    + " - "
-                    + dLevels[level].ljust(9)
-                    + " -\t"
-                    + text
-                )
-
-    # OPEN AND LOAD
-    def openAnything(self, source):
-        """Open any source / actually only opening of files is used
-        @return an open handle which must be closed after use, i.e., handle.close()"""
-
-        if source == "-":
-            return sys.stdin
-
-        # try to open with urllib (if source is http, ftp, or file URL)
-        import urllib.error
-        import urllib.parse
-        import urllib.request
-
-        try:
-            return urllib.request.urlopen(source)
-        except OSError:
-            pass
-
-        # try to open with native open function (if source is pathname)
-        try:
-            return open(source, encoding="utf8")
-        except OSError:
-            pass
-
-        # treat source as string
-        import io
-
-        return io.StringIO(str(source))
-
-    def loadSource(self, source: str) -> None:
-        """Load source file and parse with xml.dom.minidom"""
-        self.source = source
-        self.logger("Load started...")
-        sock = open(self.source, encoding="utf8")
-        self.xmldoc = minidom.parse(sock).documentElement
-        sock.close()
-        self.logger("Load done.")
-
-    # PARSE
-    def parse(self, node: Text | Element | None = None) -> None:
-        "Parse method - parses document elements"
-
-        if node is None and self.xmldoc is not None:
-            node = self.xmldoc
-
-        _method = "parse_%s" % node.__class__.__name__
-        if hasattr(self, _method):
-            parseMethod = getattr(self, _method)
-            parseMethod(node)
-        else:
-            self.logger("No handler for method %s" % _method, level=3)
-
-    def parse_Document(self, node):
-        "Parse XML document"
-
-        self.parse(node.documentElement)
-
-    def parse_Element(self, node: Element) -> None:
-        "Parse XML element"
-
-        _method = "do_%s" % node.tagName
-        if hasattr(self, _method):
-            handlerMethod = getattr(self, _method)
-            handlerMethod(node)
-        else:
-            self.logger("No handler for method %s" % _method, level=3)
-            # print traceback.print_exc()
-
-    def parse_Text(self, node: Text) -> None:
-        "Parse text inside elements. Text is stored into local buffer."
-
-        text = node.data
-        self.cntBuf.append(text)
-
-    # def parse_Comment(self, node):
-    #     """
-    #     Source can contain XML comments, but we ignore them
-    #     """
-    #     pass
-
-    # DO
-    def do_SuperMemoCollection(self, node: Element) -> None:
-        "Process SM Collection"
-
-        for child in node.childNodes:
-            self.parse(child)
-
-    def do_SuperMemoElement(self, node: Element) -> None:
-        "Process SM Element (Type - Title,Topics)"
-
-        self.logger("=" * 45, level=3)
-
-        self.cntElm.append(SuperMemoElement())
-        self.cntElm[-1]["lTitle"] = self.cntMeta["title"]
-
-        # parse all child elements
-        for child in node.childNodes:
-            self.parse(child)
-
-        # strip all saved strings, just for sure
-        for key in list(self.cntElm[-1].keys()):
-            if hasattr(self.cntElm[-1][key], "strip"):
-                self.cntElm[-1][key] = self.cntElm[-1][key].strip()
-
-        # pop current element
-        smel = self.cntElm.pop()
-
-        # Process cntElm if is valid Item (and not an Topic etc..)
-        # if smel.Lapses != None and smel.Interval != None and smel.Question != None and smel.Answer != None:
-        if smel.Title is None and smel.Question is not None and smel.Answer is not None:
-            if smel.Answer.strip() != "" and smel.Question.strip() != "":
-                # migrate only memorized otherway skip/continue
-                if self.META.onlyMemorizedItems and not (int(smel.Interval) > 0):
-                    self.logger("Element skipped \t- not memorized ...", level=3)
-                else:
-                    # import sm element data to Anki
-                    self.addItemToCards(smel)
-                    self.logger("Import element \t- " + smel["Question"], level=3)
-
-                    # print element
-                    self.logger("-" * 45, level=3)
-                    for key in list(smel.keys()):
-                        self.logger(
-                            "\t{} {}".format((key + ":").ljust(15), smel[key]), level=3
-                        )
-            else:
-                self.logger("Element skipped \t- no valid Q and A ...", level=3)
-
-        else:
-            # now we know that item was topic
-            # parsing of whole node is now finished
-
-            # test if it's really topic
-            if smel.Title is not None:
-                # remove topic from title list
-                t = self.cntMeta["title"].pop()
-                self.logger("End of topic \t- %s" % (t), level=2)
-
-    def do_Content(self, node: Element) -> None:
-        "Process SM element Content"
-
-        for child in node.childNodes:
-            if hasattr(child, "tagName") and child.firstChild is not None:
-                self.cntElm[-1][child.tagName] = child.firstChild.data
-
-    def do_LearningData(self, node: Element) -> None:
-        "Process SM element LearningData"
-
-        for child in node.childNodes:
-            if hasattr(child, "tagName") and child.firstChild is not None:
-                self.cntElm[-1][child.tagName] = child.firstChild.data
-
-    # It's being processed in do_Content now
-    # def do_Question(self, node):
-    #     for child in node.childNodes: self.parse(child)
-    #     self.cntElm[-1][node.tagName]=self.cntBuf.pop()
-
-    # It's being processed in do_Content now
-    # def do_Answer(self, node):
-    #     for child in node.childNodes: self.parse(child)
-    #     self.cntElm[-1][node.tagName]=self.cntBuf.pop()
-
-    def do_Title(self, node: Element) -> None:
-        "Process SM element Title"
-
-        t = self._decode_htmlescapes(node.firstChild.data)
-        self.cntElm[-1][node.tagName] = t
-        self.cntMeta["title"].append(t)
-        self.cntElm[-1]["lTitle"] = self.cntMeta["title"]
-        self.logger("Start of topic \t- " + " / ".join(self.cntMeta["title"]), level=2)
-
-    def do_Type(self, node: Element) -> None:
-        "Process SM element Type"
-
-        if len(self.cntBuf) >= 1:
-            self.cntElm[-1][node.tagName] = self.cntBuf.pop()
-
-
-# if __name__ == '__main__':
-
-# for testing you can start it standalone
-
-# file = u'/home/epcim/hg2g/dev/python/sm2anki/ADVENG2EXP.xxe.esc.zaloha_FINAL.xml'
-# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_noOEM.xml'
-# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_oem_1250.xml'
-# file = str(sys.argv[1])
-# impo = SupermemoXmlImporter(Deck(),file)
-# impo.foreignCards()

-# sys.exit(1)

-# vim: ts=4 sts=2 ft=python
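The only real math in the deleted SuperMemo importer is `_afactor2efactor`: clamp the A-factor to [1.2, 6.9], then map it linearly onto the E-factor range [1.3, 3.3] (the result is stored x1000 as the card's ease factor). As a worked check, the midpoint A-factor 4.05 lands exactly on the midpoint E-factor 2.3:

    def afactor_to_efactor(af: float) -> float:
        # same constants and linear rescale as the removed importer
        af_min, af_max = 1.2, 6.9
        ef_min, ef_max = 1.3, 3.3
        af = min(max(af, af_min), af_max)           # clamp
        scaled = (af - af_min) / (af_max - af_min)  # 0..1
        return ef_min + scaled * (ef_max - ef_min)

    assert abs(afactor_to_efactor(4.05) - 2.3) < 1e-9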
@@ -157,13 +157,13 @@ def lang_to_disk_lang(lang: str) -> str:


 # the currently set interface language
-current_lang = "en"  # pylint: disable=invalid-name
+current_lang = "en"

 # the current Fluent translation instance. Code in pylib/ should
 # not reference this, and should use col.tr instead. The global
 # instance exists for legacy reasons, and as a convenience for the
 # Qt code.
-current_i18n: anki._backend.RustBackend | None = None  # pylint: disable=invalid-name
+current_i18n: anki._backend.RustBackend | None = None
 tr_legacyglobal = anki._backend.Translations(None)


@@ -178,7 +178,7 @@ def ngettext(single: str, plural: str, num: int) -> str:


 def set_lang(lang: str) -> None:
-    global current_lang, current_i18n  # pylint: disable=invalid-name
+    global current_lang, current_i18n
     current_lang = lang
     current_i18n = anki._backend.RustBackend(langs=[lang])
     tr_legacyglobal.backend = weakref.ref(current_i18n)
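The `invalid-name` pragmas in this file existed only because pylint dislikes lowercase module globals being rebound inside functions; behaviour is unchanged. A usage sketch of the legacy globals this hunk touches:

    import anki.lang

    anki.lang.set_lang("de")                   # rebinds both module globals
    assert anki.lang.current_lang == "de"
    assert anki.lang.current_i18n is not None  # fresh RustBackend for that language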
@@ -10,7 +10,7 @@ import time
 from collections.abc import Sequence
 from typing import Any, NewType, Union

-import anki  # pylint: disable=unused-import
+import anki
 import anki.collection
 import anki.notes
 from anki import notetypes_pb2

@@ -419,7 +419,7 @@ and notes.mid = ? and cards.ord = ?""",

     # legacy API - used by unit tests and add-ons

-    def change(  # pylint: disable=invalid-name
+    def change(
         self,
         notetype: NotetypeDict,
         nids: list[anki.notes.NoteId],

@@ -478,8 +478,6 @@ and notes.mid = ? and cards.ord = ?""",
     # Legacy
     ##########################################################################

-    # pylint: disable=invalid-name
-
     @deprecated(info="use note.cloze_numbers_in_fields()")
     def _availClozeOrds(
         self, notetype: NotetypeDict, flds: str, allow_empty: bool = True
@@ -7,7 +7,7 @@ import copy
 from collections.abc import Sequence
 from typing import NewType

-import anki  # pylint: disable=unused-import
+import anki
 import anki.cards
 import anki.collection
 import anki.decks
@@ -4,10 +4,8 @@
 # The backend code has moved into _backend; this file exists only to avoid breaking
 # some add-ons. They should be updated to point to the correct location in the
 # future.
-#
-# pylint: disable=unused-import
-# pylint: enable=invalid-name

+# ruff: noqa: F401
 from anki.decks import DeckTreeNode
 from anki.errors import InvalidInput, NotFoundError
 from anki.lang import TR
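This hunk is the commit in miniature: pylint pragmas give way to ruff ones. `F401` is the unused-import code shared by ruff and flake8, and the file-level `# ruff: noqa: F401` form exempts every import in this re-export shim at once. For a single symbol, the per-line form would do the same job:

    from anki.decks import DeckTreeNode  # noqa: F401  (re-exported for add-ons)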
@@ -42,6 +42,7 @@ from anki.utils import ids2str, int_time

 class SchedulerBase(DeprecatedNamesMixin):
     "Actions shared between schedulers."
+
     version = 0

     def __init__(self, col: anki.collection.Collection) -> None:
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 from __future__ import annotations


@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 from __future__ import annotations

@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=invalid-name

 """
 The V3/2021 scheduler.

@@ -184,7 +183,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return self._interval_for_filtered_state(state.filtered)
         else:
             assert_exhaustive(kind)
-            return 0  # pylint: disable=unreachable
+            return 0

     def _interval_for_normal_state(
         self, normal: scheduler_pb2.SchedulingState.Normal

@@ -200,7 +199,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return normal.relearning.learning.scheduled_secs
         else:
             assert_exhaustive(kind)
-            return 0  # pylint: disable=unreachable
+            return 0

     def _interval_for_filtered_state(
         self, filtered: scheduler_pb2.SchedulingState.Filtered

@@ -212,7 +211,7 @@ class Scheduler(SchedulerBaseWithLegacy):
             return self._interval_for_normal_state(filtered.rescheduling.original_state)
         else:
             assert_exhaustive(kind)
-            return 0  # pylint: disable=unreachable
+            return 0

     def nextIvl(self, card: Card, ease: int) -> Any:
         "Don't use this - it is only required by tests, and will be moved in the future."
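The three `return 0` statements above keep their role after losing the pragma: `assert_exhaustive` takes a value the type checker has narrowed to `Never`, so the trailing return is statically unreachable but keeps the function total if an unexpected enum kind arrives at runtime. The pattern in isolation (a sketch; Anki's own helper lives in its utils):

    from typing import Literal, NoReturn

    Kind = Literal["learning", "review"]

    def assert_exhaustive(value: NoReturn) -> NoReturn:
        raise AssertionError(f"unhandled case: {value!r}")

    def interval_for(kind: Kind) -> int:
        if kind == "learning":
            return 60
        elif kind == "review":
            return 86400
        else:
            # both literals handled above, so mypy types `kind` as Never here
            assert_exhaustive(kind)
            return 0  # unreachable; pylint flagged it, hence the old pragma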
@@ -1,7 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-# pylint: disable=C

 from __future__ import annotations


@@ -27,7 +26,7 @@ def _legacy_card_stats(
     col: anki.collection.Collection, card_id: anki.cards.CardId, include_revlog: bool
 ) -> str:
     "A quick hack to preserve compatibility with the old HTML string API."
-    random_id = f"cardinfo-{base62(random.randint(0, 2 ** 64 - 1))}"
+    random_id = f"cardinfo-{base62(random.randint(0, 2**64 - 1))}"
     varName = random_id.replace("-", "")
     return f"""
 <div id="{random_id}"></div>

@@ -174,7 +173,7 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """
                 cards=cards, seconds=float(thetime)
             )
             # again/pass count
-            b += "<br>" + "Again count: %s" % bold(failed)
+            b += "<br>" + "Again count: %s" % bold(str(failed))
             if cards:
                 b += " " + "(%s correct)" % bold(
                     "%0.1f%%" % ((1 - failed / float(cards)) * 100)

@@ -182,7 +181,10 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """
             # type breakdown
             b += "<br>"
             b += "Learn: %(a)s, Review: %(b)s, Relearn: %(c)s, Filtered: %(d)s" % dict(
-                a=bold(lrn), b=bold(rev), c=bold(relrn), d=bold(filt)
+                a=bold(str(lrn)),
+                b=bold(str(rev)),
+                c=bold(str(relrn)),
+                d=bold(str(filt)),
             )
             # mature today
             mcnt, msum = self.col.db.first(
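The `str()` wrappers are typing fixes rather than behaviour changes: `%s` already stringifies its argument, but `bold()` is annotated to take a `str`, so passing raw ints only type-checked by accident. A minimal sketch, assuming a `bold` helper shaped like the one in this module:

    def bold(s: str) -> str:
        return f"<b>{s}</b>"

    failed = 3
    line = "<br>" + "Again count: %s" % bold(str(failed))
    assert line == "<br>Again count: <b>3</b>"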
@@ -321,7 +323,6 @@ group by day order by day"""
             yaxes=[dict(min=0), dict(position="right", min=0)],
         )
         if days is not None:
-            # pylint: disable=invalid-unary-operand-type
             conf["xaxis"]["min"] = -days + 0.5

         def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str:

@@ -356,7 +357,6 @@ group by day order by day"""
             yaxes=[dict(min=0), dict(position="right", min=0)],
         )
         if days is not None:
-            # pylint: disable=invalid-unary-operand-type
             conf["xaxis"]["min"] = -days + 0.5

         def plot(id: str, data: Any, ylabel: str, ylabel2: str) -> str:
@@ -1,5 +1,3 @@
-# pylint: disable=invalid-name
-
 # from subtlepatterns.com; CC BY 4.0.
 # by Daniel Beaton
 # https://www.toptal.com/designers/subtlepatterns/fancy-deboss/
@@ -12,7 +12,6 @@ from anki import notetypes_pb2
 from anki._legacy import DeprecatedNamesMixinForModule
 from anki.utils import from_json_bytes

-# pylint: disable=no-member
 StockNotetypeKind = notetypes_pb2.StockNotetype.Kind

 # add-on authors can add ("note type name", function)
@@ -16,7 +16,7 @@ import re
 from collections.abc import Collection, Sequence
 from typing import Match

-import anki  # pylint: disable=unused-import
+import anki
 import anki.collection
 from anki import tags_pb2
 from anki._legacy import DeprecatedNamesMixin, deprecated
Some files were not shown because too many files have changed in this diff.