mirror of
https://github.com/ankitects/anki.git
synced 2025-09-24 08:46:37 -04:00
Merge branch 'main' into editor-3830
This commit is contained in:
commit
fa0c4b11ab
315 changed files with 6869 additions and 6766 deletions
|
@ -5,7 +5,8 @@ DESCRIPTORS_BIN = { value = "out/rslib/proto/descriptors.bin", relative = true }
|
|||
# build script will append .exe if necessary
|
||||
PROTOC = { value = "out/extracted/protoc/bin/protoc", relative = true }
|
||||
PYO3_NO_PYTHON = "1"
|
||||
MACOSX_DEPLOYMENT_TARGET = "10.13.4"
|
||||
MACOSX_DEPLOYMENT_TARGET = "11"
|
||||
PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows
|
||||
|
||||
[term]
|
||||
color = "always"
|
||||
|
|
|
@ -5,9 +5,6 @@
|
|||
db-path = "~/.cargo/advisory-db"
|
||||
db-urls = ["https://github.com/rustsec/advisory-db"]
|
||||
ignore = [
|
||||
# pyoxidizer is stuck on an old ring version
|
||||
"RUSTSEC-2025-0009",
|
||||
"RUSTSEC-2025-0010",
|
||||
# burn depends on an unmaintained package 'paste'
|
||||
"RUSTSEC-2024-0436",
|
||||
]
|
||||
|
@ -17,12 +14,11 @@ allow = [
|
|||
"MIT",
|
||||
"Apache-2.0",
|
||||
"Apache-2.0 WITH LLVM-exception",
|
||||
"CDLA-Permissive-2.0",
|
||||
"ISC",
|
||||
"MPL-2.0",
|
||||
"Unicode-DFS-2016",
|
||||
"BSD-2-Clause",
|
||||
"BSD-3-Clause",
|
||||
"OpenSSL",
|
||||
"CC0-1.0",
|
||||
"Unlicense",
|
||||
"Zlib",
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
"ftl/usage",
|
||||
"licenses.json",
|
||||
".dmypy.json",
|
||||
"qt/bundle/PyOxidizer",
|
||||
"target",
|
||||
".mypy_cache",
|
||||
"extra",
|
||||
|
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -18,3 +18,5 @@ node_modules
|
|||
yarn-error.log
|
||||
ts/.svelte-kit
|
||||
.yarn
|
||||
.claude/settings.local.json
|
||||
CLAUDE.local.md
|
||||
|
|
6
.gitmodules
vendored
6
.gitmodules
vendored
|
@ -6,9 +6,3 @@
|
|||
path = ftl/qt-repo
|
||||
url = https://github.com/ankitects/anki-desktop-ftl.git
|
||||
shallow = true
|
||||
[submodule "qt/bundle/PyOxidizer"]
|
||||
path = qt/bundle/PyOxidizer
|
||||
url = https://github.com/ankitects/PyOxidizer.git
|
||||
shallow = true
|
||||
update = none
|
||||
|
||||
|
|
|
@ -2,4 +2,3 @@
|
|||
py_version=39
|
||||
known_first_party=anki,aqt,tests
|
||||
profile=black
|
||||
extend_skip=qt/bundle
|
||||
|
|
|
@ -18,7 +18,7 @@ mypy_path =
|
|||
ftl,
|
||||
pylib/tools,
|
||||
python
|
||||
exclude = (qt/bundle/PyOxidizer|pylib/anki/_vendor)
|
||||
exclude = (pylib/anki/_vendor)
|
||||
|
||||
[mypy-anki.*]
|
||||
disallow_untyped_defs = True
|
||||
|
@ -165,3 +165,5 @@ ignore_missing_imports = True
|
|||
ignore_missing_imports = True
|
||||
[mypy-pip_system_certs.*]
|
||||
ignore_missing_imports = True
|
||||
[mypy-anki_audio]
|
||||
ignore_missing_imports = True
|
||||
|
|
1
.python-version
Normal file
1
.python-version
Normal file
|
@ -0,0 +1 @@
|
|||
3.13.5
|
|
@ -1,2 +1,2 @@
|
|||
target-version = "py39"
|
||||
extend-exclude = ["qt/bundle"]
|
||||
extend-exclude = []
|
||||
|
|
2
.version
2
.version
|
@ -1 +1 @@
|
|||
25.05
|
||||
25.06b4
|
||||
|
|
|
@ -31,11 +31,13 @@
|
|||
"rust-analyzer.rustfmt.extraArgs": ["+nightly"],
|
||||
"search.exclude": {
|
||||
"**/node_modules": true,
|
||||
".bazel/**": true,
|
||||
"qt/bundle/PyOxidizer": true
|
||||
".bazel/**": true
|
||||
},
|
||||
"rust-analyzer.cargo.buildScripts.enable": true,
|
||||
"python.analysis.typeCheckingMode": "off",
|
||||
"python.analysis.exclude": [
|
||||
"out/launcher/**"
|
||||
],
|
||||
"terminal.integrated.env.windows": {
|
||||
"PATH": "c:\\msys64\\usr\\bin;${env:Path}"
|
||||
}
|
||||
|
|
82
CLAUDE.md
Normal file
82
CLAUDE.md
Normal file
|
@ -0,0 +1,82 @@
|
|||
# Claude Code Configuration
|
||||
|
||||
## Project Overview
|
||||
|
||||
Anki is a spaced repetition flashcard program with a multi-layered architecture. Main components:
|
||||
|
||||
- Web frontend: Svelte/TypeScript in ts/
|
||||
- PyQt GUI, which embeds the web components in aqt/
|
||||
- Python library which wraps our rust Layer (pylib/, with Rust module in pylib/rsbridge)
|
||||
- Core Rust layer in rslib/
|
||||
- Protobuf definitions in proto/ that are used by the different layers to
|
||||
talk to each other.
|
||||
|
||||
## Building/checking
|
||||
|
||||
./check (check.bat) will format the code and run the main build & checks.
|
||||
Please do this as a final step before marking a task as completed.
|
||||
|
||||
## Quick iteration
|
||||
|
||||
During development, you can build/check subsections of our code:
|
||||
|
||||
- Rust: 'cargo check'
|
||||
- Python: './tools/dmypy'
|
||||
- TypeScript/Svelte: './ninja check:svelte'
|
||||
|
||||
Be mindful that some changes (such as modifications to .proto files) may
|
||||
need a full build with './check' first.
|
||||
|
||||
## Build tooling
|
||||
|
||||
'./check' and './ninja' invoke our build system, which is implemented in build/. It takes care of downloading required deps and invoking our build
|
||||
steps.
|
||||
|
||||
## Translations
|
||||
|
||||
ftl/ contains our Fluent translation files. We have scripts in rslib/i18n
|
||||
to auto-generate an API for Rust, TypeScript and Python so that our code can
|
||||
access the translations in a type-safe manner. Changes should be made to
|
||||
ftl/core or ftl/qt. Except for features specific to our Qt interface, prefer
|
||||
the core module. When adding new strings, confirm the appropriate ftl file
|
||||
first, and try to match the existing style.
|
||||
|
||||
## Protobuf and IPC
|
||||
|
||||
Our build scripts use the .proto files to define our Rust library's
|
||||
non-Rust API. pylib/rsbridge exposes that API, and _backend.py exposes
|
||||
snake_case methods for each protobuf RPC that call into the API.
|
||||
Similar tooling creates a @generated/backend TypeScript module for
|
||||
communicating with the Rust backend (which happens over POST requests).
|
||||
|
||||
## Fixing errors
|
||||
|
||||
When dealing with build errors or failing tests, invoke 'check' or one
|
||||
of the quick iteration commands regularly. This helps verify your changes
|
||||
are correct. To locate other instances of a problem, run the check again -
|
||||
don't attempt to grep the codebase.
|
||||
|
||||
## Ignores
|
||||
|
||||
The files in out/ are auto-generated. Mostly you should ignore that folder,
|
||||
though you may sometimes find it useful to view out/{pylib/anki,qt/_aqt,ts/lib/generated} when dealing with cross-language communication or our other generated sourcecode.
|
||||
|
||||
## Launcher/installer
|
||||
|
||||
The code for our launcher is in qt/launcher, with separate code for each
|
||||
platform.
|
||||
|
||||
## Rust dependencies
|
||||
|
||||
Prefer adding to the root workspace, and using dep.workspace = true in the individual Rust project.
|
||||
|
||||
## Rust utilities
|
||||
|
||||
rslib/{process,io} contain some helpers for file and process operations,
|
||||
which provide better error messages/context and some ergonomics. Use them
|
||||
when possible.
|
||||
|
||||
## Rust error handling
|
||||
|
||||
in rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping
|
||||
in build scripts/tests is fine.
|
|
@ -148,7 +148,7 @@ user1823 <92206575+user1823@users.noreply.github.com>
|
|||
Gustaf Carefall <https://github.com/Gustaf-C>
|
||||
virinci <github.com/virinci>
|
||||
snowtimeglass <snowtimeglass@gmail.com>
|
||||
brishtibheja <sorata225yume@gmail.com>
|
||||
brishtibheja <136738526+brishtibheja@users.noreply.github.com>
|
||||
Ben Olson <github.com/grepgrok>
|
||||
Akash Reddy <akashreddy2003@gmail.com>
|
||||
Lucio Sauer <watermanpaint@posteo.net>
|
||||
|
@ -230,6 +230,7 @@ KolbyML <https://github.com/KolbyML>
|
|||
Adnane Taghi <dev@soleuniverse.me>
|
||||
Spiritual Father <https://github.com/spiritualfather>
|
||||
Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
|
||||
Sunong2008 <https://github.com/Sunrongguo2008>
|
||||
Marvin Kopf <marvinkopf@outlook.com>
|
||||
********************
|
||||
|
||||
|
|
2027
Cargo.lock
generated
2027
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
137
Cargo.toml
137
Cargo.toml
|
@ -12,8 +12,7 @@ members = [
|
|||
"build/runner",
|
||||
"ftl",
|
||||
"pylib/rsbridge",
|
||||
"qt/bundle/mac",
|
||||
"qt/bundle/win",
|
||||
"qt/launcher",
|
||||
"rslib",
|
||||
"rslib/i18n",
|
||||
"rslib/io",
|
||||
|
@ -23,7 +22,6 @@ members = [
|
|||
"rslib/sync",
|
||||
"tools/minilints",
|
||||
]
|
||||
exclude = ["qt/bundle"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.dependencies.percent-encoding-iri]
|
||||
|
@ -35,9 +33,9 @@ git = "https://github.com/ankitects/linkcheck.git"
|
|||
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
|
||||
|
||||
[workspace.dependencies.fsrs]
|
||||
# version = "3.0.0"
|
||||
git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
|
||||
rev = "33ec3ee4d5d73e704633469cf5bf1a42e620a524"
|
||||
version = "4.1.1"
|
||||
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
|
||||
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
|
||||
# path = "../open-spaced-repetition/fsrs-rs"
|
||||
|
||||
[workspace.dependencies]
|
||||
|
@ -54,99 +52,98 @@ ninja_gen = { "path" = "build/ninja_gen" }
|
|||
unicase = "=2.6.0" # any changes could invalidate sqlite indexes
|
||||
|
||||
# normal
|
||||
ammonia = "4.0.0"
|
||||
anyhow = "1.0.90"
|
||||
apple-bundles = "0.17.0"
|
||||
async-compression = { version = "0.4.17", features = ["zstd", "tokio"] }
|
||||
ammonia = "4.1.0"
|
||||
anyhow = "1.0.98"
|
||||
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
|
||||
async-stream = "0.3.6"
|
||||
async-trait = "0.1.83"
|
||||
axum = { version = "0.7", features = ["multipart", "macros"] }
|
||||
axum-client-ip = "0.6"
|
||||
axum-extra = { version = "0.9.4", features = ["typed-header"] }
|
||||
blake3 = "1.5.4"
|
||||
bytes = "1.7.2"
|
||||
camino = "1.1.9"
|
||||
chrono = { version = "0.4.38", default-features = false, features = ["std", "clock"] }
|
||||
clap = { version = "4.5.20", features = ["derive"] }
|
||||
coarsetime = "0.1.34"
|
||||
convert_case = "0.6.0"
|
||||
criterion = { version = "0.5.1" }
|
||||
csv = "1.3.0"
|
||||
data-encoding = "2.6.0"
|
||||
async-trait = "0.1.88"
|
||||
axum = { version = "0.8.4", features = ["multipart", "macros"] }
|
||||
axum-client-ip = "1.1.3"
|
||||
axum-extra = { version = "0.10.1", features = ["typed-header"] }
|
||||
blake3 = "1.8.2"
|
||||
bytes = "1.10.1"
|
||||
camino = "1.1.10"
|
||||
chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] }
|
||||
clap = { version = "4.5.40", features = ["derive"] }
|
||||
coarsetime = "0.1.36"
|
||||
convert_case = "0.8.0"
|
||||
criterion = { version = "0.6.0" }
|
||||
csv = "1.3.1"
|
||||
data-encoding = "2.9.0"
|
||||
difflib = "0.4.0"
|
||||
dirs = "5.0.1"
|
||||
dirs = "6.0.0"
|
||||
dunce = "1.0.5"
|
||||
embed-resource = "3.0.4"
|
||||
envy = "0.4.2"
|
||||
flate2 = "1.0.34"
|
||||
fluent = "0.16.1"
|
||||
fluent-bundle = "0.15.3"
|
||||
fluent-syntax = "0.11.1"
|
||||
flate2 = "1.1.2"
|
||||
fluent = "0.17.0"
|
||||
fluent-bundle = "0.16.0"
|
||||
fluent-syntax = "0.12.0"
|
||||
fnv = "1.0.7"
|
||||
futures = "0.3.31"
|
||||
glob = "0.3.1"
|
||||
globset = "0.4.15"
|
||||
globset = "0.4.16"
|
||||
hex = "0.4.3"
|
||||
htmlescape = "0.3.1"
|
||||
hyper = "1"
|
||||
id_tree = "1.8.0"
|
||||
inflections = "1.1.1"
|
||||
intl-memoizer = "0.5.2"
|
||||
itertools = "0.13.0"
|
||||
intl-memoizer = "0.5.3"
|
||||
itertools = "0.14.0"
|
||||
junction = "1.2.0"
|
||||
lazy_static = "1.5.0"
|
||||
libc = "0.2"
|
||||
libc-stdhandle = "0.1"
|
||||
maplit = "1.0.2"
|
||||
nom = "7.1.3"
|
||||
num-format = "0.4.4"
|
||||
num_cpus = "1.16.0"
|
||||
num_cpus = "1.17.0"
|
||||
num_enum = "0.7.3"
|
||||
once_cell = "1.20.2"
|
||||
once_cell = "1.21.3"
|
||||
pbkdf2 = { version = "0.12", features = ["simple"] }
|
||||
phf = { version = "0.11.2", features = ["macros"] }
|
||||
pin-project = "1.1.6"
|
||||
plist = "1.7.0"
|
||||
prettyplease = "0.2.24"
|
||||
phf = { version = "0.11.3", features = ["macros"] }
|
||||
pin-project = "1.1.10"
|
||||
prettyplease = "0.2.34"
|
||||
prost = "0.13"
|
||||
prost-build = "0.13"
|
||||
prost-reflect = "0.14"
|
||||
prost-reflect = "0.14.7"
|
||||
prost-types = "0.13"
|
||||
pulldown-cmark = "0.9.6"
|
||||
pyo3 = { version = "0.24", features = ["extension-module", "abi3", "abi3-py39"] }
|
||||
rand = "0.8.5"
|
||||
regex = "1.11.0"
|
||||
reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] }
|
||||
rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] }
|
||||
pulldown-cmark = "0.13.0"
|
||||
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
|
||||
rand = "0.9.1"
|
||||
regex = "1.11.1"
|
||||
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
|
||||
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
|
||||
rustls-pemfile = "2.2.0"
|
||||
scopeguard = "1.2.0"
|
||||
serde = { version = "1.0.210", features = ["derive"] }
|
||||
serde-aux = "4.5.0"
|
||||
serde_json = "1.0.132"
|
||||
serde_repr = "0.1.19"
|
||||
serde_tuple = "0.5.0"
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde-aux = "4.7.0"
|
||||
serde_json = "1.0.140"
|
||||
serde_repr = "0.1.20"
|
||||
serde_tuple = "1.1.0"
|
||||
sha1 = "0.10.6"
|
||||
sha2 = { version = "0.10.8" }
|
||||
simple-file-manifest = "0.11.0"
|
||||
snafu = { version = "0.8.5", features = ["rust_1_61"] }
|
||||
strum = { version = "0.26.3", features = ["derive"] }
|
||||
syn = { version = "2.0.82", features = ["parsing", "printing"] }
|
||||
tar = "0.4.42"
|
||||
tempfile = "3.13.0"
|
||||
sha2 = { version = "0.10.9" }
|
||||
snafu = { version = "0.8.6", features = ["rust_1_61"] }
|
||||
strum = { version = "0.27.1", features = ["derive"] }
|
||||
syn = { version = "2.0.103", features = ["parsing", "printing"] }
|
||||
tar = "0.4.44"
|
||||
tempfile = "3.20.0"
|
||||
termcolor = "1.4.1"
|
||||
tokio = { version = "1.40", features = ["fs", "rt-multi-thread", "macros", "signal"] }
|
||||
tokio-util = { version = "0.7.12", features = ["io"] }
|
||||
tower-http = { version = "0.5", features = ["trace"] }
|
||||
tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
|
||||
tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] }
|
||||
tokio-util = { version = "0.7.15", features = ["io"] }
|
||||
tower-http = { version = "0.6.6", features = ["trace"] }
|
||||
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
|
||||
tugger-windows-codesign = "0.10.0"
|
||||
unic-langid = { version = "0.9.5", features = ["macros"] }
|
||||
tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
|
||||
unic-langid = { version = "0.9.6", features = ["macros"] }
|
||||
unic-ucd-category = "0.9.0"
|
||||
unicode-normalization = "0.1.24"
|
||||
walkdir = "2.5.0"
|
||||
which = "5.0.0"
|
||||
wiremock = "0.6.2"
|
||||
which = "8.0.0"
|
||||
winapi = { version = "0.3", features = ["wincon"] }
|
||||
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] }
|
||||
wiremock = "0.6.3"
|
||||
xz2 = "0.1.7"
|
||||
zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] }
|
||||
zstd = { version = "0.13.2", features = ["zstdmt"] }
|
||||
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
|
||||
zstd = { version = "0.13.3", features = ["zstdmt"] }
|
||||
|
||||
# Apply mild optimizations to our dependencies in dev mode, which among other things
|
||||
# improves sha2 performance by about 21x. Opt 1 chosen due to
|
||||
|
|
|
@ -27,7 +27,6 @@ pub fn build_and_check_aqt(build: &mut Build) -> Result<()> {
|
|||
build_forms(build)?;
|
||||
build_generated_sources(build)?;
|
||||
build_data_folder(build)?;
|
||||
build_macos_helper(build)?;
|
||||
build_wheel(build)?;
|
||||
check_python(build)?;
|
||||
Ok(())
|
||||
|
@ -39,7 +38,6 @@ fn build_forms(build: &mut Build) -> Result<()> {
|
|||
let mut py_files = vec![];
|
||||
for path in ui_files.resolve() {
|
||||
let outpath = outdir.join(path.file_name().unwrap()).into_string();
|
||||
py_files.push(outpath.replace(".ui", "_qt5.py"));
|
||||
py_files.push(outpath.replace(".ui", "_qt6.py"));
|
||||
}
|
||||
build.add_action(
|
||||
|
@ -332,47 +330,20 @@ impl BuildAction for BuildThemedIcon<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn build_macos_helper(build: &mut Build) -> Result<()> {
|
||||
if cfg!(target_os = "macos") {
|
||||
build.add_action(
|
||||
"qt:aqt:data:lib:libankihelper",
|
||||
RunCommand {
|
||||
command: ":pyenv:bin",
|
||||
args: "$script $out $in",
|
||||
inputs: hashmap! {
|
||||
"script" => inputs!["qt/mac/helper_build.py"],
|
||||
"in" => inputs![glob!["qt/mac/*.swift"]],
|
||||
"" => inputs!["out/env"],
|
||||
},
|
||||
outputs: hashmap! {
|
||||
"out" => vec!["qt/_aqt/data/lib/libankihelper.dylib"],
|
||||
},
|
||||
},
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_wheel(build: &mut Build) -> Result<()> {
|
||||
build.add_action(
|
||||
"wheels:aqt",
|
||||
BuildWheel {
|
||||
name: "aqt",
|
||||
version: anki_version(),
|
||||
src_folder: "qt/aqt",
|
||||
gen_folder: "$builddir/qt/_aqt",
|
||||
platform: None,
|
||||
deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "python/requirements.aqt.in"],
|
||||
deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "qt/pyproject.toml"],
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn check_python(build: &mut Build) -> Result<()> {
|
||||
python_format(
|
||||
build,
|
||||
"qt",
|
||||
inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")],
|
||||
)?;
|
||||
python_format(build, "qt", inputs![glob!("qt/**/*.py")])?;
|
||||
|
||||
build.add_action(
|
||||
"check:pytest:aqt",
|
||||
|
|
|
@ -1,442 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
use ninja_gen::action::BuildAction;
|
||||
use ninja_gen::archives::download_and_extract;
|
||||
use ninja_gen::archives::empty_manifest;
|
||||
use ninja_gen::archives::with_exe;
|
||||
use ninja_gen::archives::OnlineArchive;
|
||||
use ninja_gen::archives::Platform;
|
||||
use ninja_gen::build::BuildProfile;
|
||||
use ninja_gen::cargo::CargoBuild;
|
||||
use ninja_gen::cargo::RustOutput;
|
||||
use ninja_gen::git::SyncSubmodule;
|
||||
use ninja_gen::glob;
|
||||
use ninja_gen::input::BuildInput;
|
||||
use ninja_gen::inputs;
|
||||
use ninja_gen::python::PythonEnvironment;
|
||||
use ninja_gen::Build;
|
||||
use ninja_gen::Utf8Path;
|
||||
|
||||
use crate::anki_version;
|
||||
use crate::platform::overriden_python_target_platform;
|
||||
use crate::platform::overriden_rust_target_triple;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum DistKind {
|
||||
Standard,
|
||||
}
|
||||
|
||||
impl DistKind {
|
||||
fn folder_name(&self) -> &'static str {
|
||||
match self {
|
||||
DistKind::Standard => "std",
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
match self {
|
||||
DistKind::Standard => "standard",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_bundle(build: &mut Build) -> Result<()> {
|
||||
// install into venv
|
||||
setup_primary_venv(build)?;
|
||||
install_anki_wheels(build)?;
|
||||
|
||||
// bundle venv into output binary + extra_files
|
||||
build_pyoxidizer(build)?;
|
||||
build_artifacts(build)?;
|
||||
build_binary(build)?;
|
||||
|
||||
// package up outputs with Qt/other deps
|
||||
download_dist_folder_deps(build)?;
|
||||
build_dist_folder(build, DistKind::Standard)?;
|
||||
|
||||
build_packages(build)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn targetting_macos_arm() -> bool {
|
||||
cfg!(all(target_os = "macos", target_arch = "aarch64"))
|
||||
&& overriden_python_target_platform().is_none()
|
||||
}
|
||||
|
||||
const WIN_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz",
|
||||
sha256: "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
|
||||
};
|
||||
|
||||
const MAC_ARM_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-arm64.tar.gz",
|
||||
sha256: "f6c4af9be59ae1c82a16f5c6307f13cbf31b49ad7b69ce1cb6e0e7b403cfdb8f",
|
||||
};
|
||||
|
||||
const MAC_AMD_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-amd64.tar.gz",
|
||||
sha256: "ecbb3c878805cdd58b1a0b8e3fd8c753b8ce3ad36c8b5904a79111f9db29ff42",
|
||||
};
|
||||
|
||||
const MAC_ARM_QT6: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-arm64.tar.zst",
|
||||
sha256: "9b2ade4ae9b80506689062845e83e8c60f7fa9843545bf7bb2d11d3e2f105878",
|
||||
};
|
||||
|
||||
const MAC_AMD_QT6: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-amd64.tar.zst",
|
||||
sha256: "dbd0871e4da22820d1fa9ab29220d631467d1178038dcab4b15169ad7f499b1b",
|
||||
};
|
||||
|
||||
const LINUX_QT_PLUGINS: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-02/qt-plugins-linux-amd64.tar.gz",
|
||||
sha256: "66bb568aca7242bc55ad419bf5c96755ca15d2a743e1c3a09cba8b83230b138b",
|
||||
};
|
||||
|
||||
const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
|
||||
sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
|
||||
};
|
||||
|
||||
fn download_dist_folder_deps(build: &mut Build) -> Result<()> {
|
||||
let mut bundle_deps = vec![":wheels"];
|
||||
if cfg!(windows) {
|
||||
download_and_extract(build, "win_amd64_audio", WIN_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:win_amd64_audio", ":extract:nsis_plugins"]);
|
||||
} else if cfg!(target_os = "macos") {
|
||||
if targetting_macos_arm() {
|
||||
download_and_extract(build, "mac_arm_audio", MAC_ARM_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "mac_arm_qt6", MAC_ARM_QT6, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:mac_arm_audio", ":extract:mac_arm_qt6"]);
|
||||
} else {
|
||||
download_and_extract(build, "mac_amd_audio", MAC_AMD_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "mac_amd_qt6", MAC_AMD_QT6, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:mac_amd_audio", ":extract:mac_amd_qt6"]);
|
||||
}
|
||||
} else {
|
||||
download_and_extract(
|
||||
build,
|
||||
"linux_qt_plugins",
|
||||
LINUX_QT_PLUGINS,
|
||||
empty_manifest(),
|
||||
)?;
|
||||
bundle_deps.extend([":extract:linux_qt_plugins"]);
|
||||
}
|
||||
build.add_dependency(
|
||||
"bundle:deps",
|
||||
inputs![bundle_deps
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()],
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct Venv {
|
||||
label: &'static str,
|
||||
path_without_builddir: &'static str,
|
||||
}
|
||||
|
||||
impl Venv {
|
||||
fn label_as_target(&self, suffix: &str) -> String {
|
||||
format!(":{}{suffix}", self.label)
|
||||
}
|
||||
}
|
||||
|
||||
const PRIMARY_VENV: Venv = Venv {
|
||||
label: "bundle:pyenv",
|
||||
path_without_builddir: "bundle/pyenv",
|
||||
};
|
||||
|
||||
fn setup_primary_venv(build: &mut Build) -> Result<()> {
|
||||
let mut qt6_reqs = inputs![
|
||||
"python/requirements.bundle.txt",
|
||||
"python/requirements.qt6_6.txt",
|
||||
];
|
||||
if cfg!(windows) {
|
||||
qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"];
|
||||
}
|
||||
build.add_action(
|
||||
PRIMARY_VENV.label,
|
||||
PythonEnvironment {
|
||||
folder: PRIMARY_VENV.path_without_builddir,
|
||||
base_requirements_txt: "python/requirements.base.txt".into(),
|
||||
requirements_txt: qt6_reqs,
|
||||
extra_binary_exports: &[],
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct InstallAnkiWheels {
|
||||
venv: Venv,
|
||||
}
|
||||
|
||||
impl BuildAction for InstallAnkiWheels {
|
||||
fn command(&self) -> &str {
|
||||
"$pip install --force-reinstall --no-deps $in"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("pip", inputs![self.venv.label_as_target(":pip")]);
|
||||
build.add_inputs("in", inputs![":wheels"]);
|
||||
build.add_output_stamp("bundle/wheels.stamp");
|
||||
}
|
||||
}
|
||||
|
||||
fn install_anki_wheels(build: &mut Build) -> Result<()> {
|
||||
build.add_action(
|
||||
"bundle:add_wheels:qt6",
|
||||
InstallAnkiWheels { venv: PRIMARY_VENV },
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_pyoxidizer(build: &mut Build) -> Result<()> {
|
||||
let offline_build = env::var("OFFLINE_BUILD").is_ok();
|
||||
|
||||
build.add_action(
|
||||
"bundle:pyoxidizer:repo",
|
||||
SyncSubmodule {
|
||||
path: "qt/bundle/PyOxidizer",
|
||||
offline_build,
|
||||
},
|
||||
)?;
|
||||
let target =
|
||||
overriden_rust_target_triple().unwrap_or_else(|| Platform::current().as_rust_triple());
|
||||
let output_bin = format!("bundle/rust/{target}/release/pyoxidizer",);
|
||||
build.add_action(
|
||||
"bundle:pyoxidizer:bin",
|
||||
CargoBuild {
|
||||
inputs: inputs![
|
||||
":bundle:pyoxidizer:repo",
|
||||
"out/env",
|
||||
glob!["qt/bundle/PyOxidizer/**"]
|
||||
],
|
||||
// can't use ::Binary() here, as we're in a separate workspace
|
||||
outputs: &[RustOutput::Data("bin", &with_exe(&output_bin))],
|
||||
target: Some(target),
|
||||
extra_args: &format!(
|
||||
"--manifest-path={} --target-dir={} -p pyoxidizer",
|
||||
"qt/bundle/PyOxidizer/Cargo.toml", "$builddir/bundle/rust"
|
||||
),
|
||||
release_override: Some(BuildProfile::Release),
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct BuildArtifacts {}
|
||||
|
||||
impl BuildAction for BuildArtifacts {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-artifacts $bundle_root $pyoxidizer_bin"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("pyoxidizer_bin", inputs![":bundle:pyoxidizer:bin"]);
|
||||
build.add_inputs("", inputs![PRIMARY_VENV.label_as_target("")]);
|
||||
build.add_inputs("", inputs![":bundle:add_wheels:qt6", glob!["qt/bundle/**"]]);
|
||||
build.add_variable("bundle_root", "$builddir/bundle");
|
||||
build.add_outputs_ext(
|
||||
"pyo3_config",
|
||||
vec!["bundle/artifacts/pyo3-build-config-file.txt"],
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
fn check_output_timestamps(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn build_artifacts(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:artifacts", BuildArtifacts {})
|
||||
}
|
||||
|
||||
struct BuildBundle {}
|
||||
|
||||
impl BuildAction for BuildBundle {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-bundle-binary"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("", inputs![":bundle:artifacts", glob!["qt/bundle/**"]]);
|
||||
build.add_outputs(
|
||||
"",
|
||||
vec![RustOutput::Binary("anki").path(
|
||||
Utf8Path::new("$builddir/bundle/rust"),
|
||||
Some(
|
||||
overriden_rust_target_triple()
|
||||
.unwrap_or_else(|| Platform::current().as_rust_triple()),
|
||||
),
|
||||
// our pyoxidizer bin uses lto on the release profile
|
||||
BuildProfile::Release,
|
||||
)],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_binary(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:binary", BuildBundle {})
|
||||
}
|
||||
|
||||
struct BuildDistFolder {
|
||||
kind: DistKind,
|
||||
deps: BuildInput,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildDistFolder {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-dist-folder $kind $out_folder "
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("", &self.deps);
|
||||
build.add_variable("kind", self.kind.name());
|
||||
let folder = match self.kind {
|
||||
DistKind::Standard => "bundle/std",
|
||||
};
|
||||
build.add_outputs("out_folder", vec![folder]);
|
||||
build.add_outputs("stamp", vec![format!("{folder}.stamp")]);
|
||||
}
|
||||
|
||||
fn check_output_timestamps(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
let deps = inputs![":bundle:deps", ":bundle:binary", glob!["qt/bundle/**"]];
|
||||
let group = match kind {
|
||||
DistKind::Standard => "bundle:folder:std",
|
||||
};
|
||||
build.add_action(group, BuildDistFolder { kind, deps })
|
||||
}
|
||||
|
||||
fn build_packages(build: &mut Build) -> Result<()> {
|
||||
if cfg!(windows) {
|
||||
build_windows_installers(build)
|
||||
} else if cfg!(target_os = "macos") {
|
||||
build_mac_app(build, DistKind::Standard)?;
|
||||
build_dmgs(build)
|
||||
} else {
|
||||
build_tarball(build, DistKind::Standard)
|
||||
}
|
||||
}
|
||||
|
||||
struct BuildTarball {
|
||||
kind: DistKind,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildTarball {
|
||||
fn command(&self) -> &str {
|
||||
"chmod -R a+r $folder && tar -I '$zstd' --transform $transform -cf $tarball -C $folder ."
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let input_folder_name = self.kind.folder_name();
|
||||
let input_folder_target = format!(":bundle:folder:{input_folder_name}");
|
||||
let input_folder_path = format!("$builddir/bundle/{input_folder_name}");
|
||||
|
||||
let version = anki_version();
|
||||
let qt = match self.kind {
|
||||
DistKind::Standard => "qt6",
|
||||
};
|
||||
let output_folder_base = format!("anki-{version}-linux-{qt}");
|
||||
let output_tarball = format!("bundle/package/{output_folder_base}.tar.zst");
|
||||
|
||||
build.add_inputs("", inputs![input_folder_target]);
|
||||
build.add_variable("zstd", "zstd -c --long -T0 -18");
|
||||
build.add_variable("transform", format!("s%^.%{output_folder_base}%S"));
|
||||
build.add_variable("folder", input_folder_path);
|
||||
build.add_outputs("tarball", vec![output_tarball]);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
let name = kind.folder_name();
|
||||
build.add_action(format!("bundle:package:{name}"), BuildTarball { kind })
|
||||
}
|
||||
|
||||
struct BuildWindowsInstallers {}
|
||||
|
||||
impl BuildAction for BuildWindowsInstallers {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeexe --target-dir=out/rust -- $version $src_root $bundle_root $out"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let version = anki_version();
|
||||
let outputs = ["qt6"].iter().map(|qt| {
|
||||
let output_base = format!("anki-{version}-windows-{qt}");
|
||||
format!("bundle/package/{output_base}.exe")
|
||||
});
|
||||
|
||||
build.add_inputs("", inputs![":bundle:folder:std"]);
|
||||
build.add_variable("version", &version);
|
||||
build.add_variable("bundle_root", "$builddir/bundle");
|
||||
build.add_outputs("out", outputs);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_windows_installers(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:package", BuildWindowsInstallers {})
|
||||
}
|
||||
|
||||
struct BuildMacApp {
|
||||
kind: DistKind,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildMacApp {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeapp --target-dir=out/rust -- build-app $version $kind $stamp"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let folder_name = self.kind.folder_name();
|
||||
build.add_inputs("", inputs![format!(":bundle:folder:{folder_name}")]);
|
||||
build.add_variable("version", anki_version());
|
||||
build.add_variable("kind", self.kind.name());
|
||||
build.add_outputs("stamp", vec![format!("bundle/app/{folder_name}.stamp")]);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind })
|
||||
}
|
||||
|
||||
struct BuildDmgs {}
|
||||
|
||||
impl BuildAction for BuildDmgs {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeapp --target-dir=out/rust -- build-dmgs $dmgs"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let version = anki_version();
|
||||
let platform = if targetting_macos_arm() {
|
||||
"apple"
|
||||
} else {
|
||||
"intel"
|
||||
};
|
||||
let qt = &["qt6"][..];
|
||||
let dmgs = qt
|
||||
.iter()
|
||||
.map(|qt| format!("bundle/dmg/anki-{version}-mac-{platform}-{qt}.dmg"));
|
||||
|
||||
build.add_inputs("", inputs![":bundle:app"]);
|
||||
build.add_outputs("dmgs", dmgs);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_dmgs(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:dmg", BuildDmgs {})
|
||||
}
|
44
build/configure/src/launcher.rs
Normal file
44
build/configure/src/launcher.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use anyhow::Result;
|
||||
use ninja_gen::archives::download_and_extract;
|
||||
use ninja_gen::archives::empty_manifest;
|
||||
use ninja_gen::archives::OnlineArchive;
|
||||
use ninja_gen::command::RunCommand;
|
||||
use ninja_gen::hashmap;
|
||||
use ninja_gen::inputs;
|
||||
use ninja_gen::Build;
|
||||
|
||||
pub fn setup_uv_universal(build: &mut Build) -> Result<()> {
|
||||
if !cfg!(target_arch = "aarch64") {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
build.add_action(
|
||||
"launcher:uv_universal",
|
||||
RunCommand {
|
||||
command: "/usr/bin/lipo",
|
||||
args: "-create -output $out $arm_bin $x86_bin",
|
||||
inputs: hashmap! {
|
||||
"arm_bin" => inputs![":extract:uv:bin"],
|
||||
"x86_bin" => inputs![":extract:uv_mac_x86:bin"],
|
||||
},
|
||||
outputs: hashmap! {
|
||||
"out" => vec!["launcher/uv"],
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn build_launcher(build: &mut Build) -> Result<()> {
|
||||
setup_uv_universal(build)?;
|
||||
download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Prebuilt NSIS plugin bundle used when assembling the Windows installer.
const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
    sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
};
|
|
@ -2,7 +2,7 @@
|
|||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
mod aqt;
|
||||
mod bundle;
|
||||
mod launcher;
|
||||
mod platform;
|
||||
mod pylib;
|
||||
mod python;
|
||||
|
@ -13,13 +13,14 @@ use std::env;
|
|||
|
||||
use anyhow::Result;
|
||||
use aqt::build_and_check_aqt;
|
||||
use bundle::build_bundle;
|
||||
use launcher::build_launcher;
|
||||
use ninja_gen::glob;
|
||||
use ninja_gen::inputs;
|
||||
use ninja_gen::protobuf::check_proto;
|
||||
use ninja_gen::protobuf::setup_protoc;
|
||||
use ninja_gen::python::setup_python;
|
||||
use ninja_gen::python::setup_uv;
|
||||
use ninja_gen::Build;
|
||||
use platform::overriden_python_venv_platform;
|
||||
use pylib::build_pylib;
|
||||
use pylib::check_pylib;
|
||||
use python::check_python;
|
||||
|
@ -47,7 +48,10 @@ fn main() -> Result<()> {
|
|||
check_proto(build, inputs![glob!["proto/**/*.proto"]])?;
|
||||
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
setup_python(build)?;
|
||||
setup_uv(
|
||||
build,
|
||||
overriden_python_venv_platform().unwrap_or(build.host_platform),
|
||||
)?;
|
||||
}
|
||||
setup_venv(build)?;
|
||||
|
||||
|
@ -57,7 +61,7 @@ fn main() -> Result<()> {
|
|||
build_and_check_aqt(build)?;
|
||||
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
build_bundle(build)?;
|
||||
build_launcher(build)?;
|
||||
}
|
||||
|
||||
setup_sphinx(build)?;
|
||||
|
|
|
@ -5,18 +5,30 @@ use std::env;
|
|||
|
||||
use ninja_gen::archives::Platform;
|
||||
|
||||
/// Usually None to use the host architecture; can be overriden by setting
|
||||
/// MAC_X86 to build for x86_64 on Apple Silicon
|
||||
/// Please see [`overriden_python_target_platform()`] for details.
|
||||
pub fn overriden_rust_target_triple() -> Option<&'static str> {
|
||||
overriden_python_target_platform().map(|p| p.as_rust_triple())
|
||||
overriden_python_wheel_platform().map(|p| p.as_rust_triple())
|
||||
}
|
||||
|
||||
/// Usually None to use the host architecture; can be overriden by setting
|
||||
/// MAC_X86 to build for x86_64 on Apple Silicon
|
||||
pub fn overriden_python_target_platform() -> Option<Platform> {
|
||||
if env::var("MAC_X86").is_ok() {
|
||||
Some(Platform::MacX64)
|
||||
/// Usually None to use the host architecture, except on Windows which
|
||||
/// always uses x86_64, since WebEngine is unavailable for ARM64.
|
||||
pub fn overriden_python_venv_platform() -> Option<Platform> {
|
||||
if cfg!(target_os = "windows") {
|
||||
Some(Platform::WindowsX64)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Like [`overriden_python_venv_platform`], but:
|
||||
/// If MAC_X86 is set, an X86 wheel will be built on macOS ARM.
|
||||
/// If LIN_ARM64 is set, an ARM64 wheel will be built on Linux AMD64.
|
||||
pub fn overriden_python_wheel_platform() -> Option<Platform> {
|
||||
if env::var("MAC_X86").is_ok() {
|
||||
Some(Platform::MacX64)
|
||||
} else if env::var("LIN_ARM64").is_ok() {
|
||||
Some(Platform::LinuxArm)
|
||||
} else {
|
||||
overriden_python_venv_platform()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ use ninja_gen::python::PythonTest;
|
|||
use ninja_gen::Build;
|
||||
|
||||
use crate::anki_version;
|
||||
use crate::platform::overriden_python_target_platform;
|
||||
use crate::platform::overriden_python_wheel_platform;
|
||||
use crate::python::BuildWheel;
|
||||
use crate::python::GenPythonProto;
|
||||
|
||||
|
@ -50,7 +50,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
|
|||
output: &format!(
|
||||
"pylib/anki/_rsbridge.{}",
|
||||
match build.host_platform {
|
||||
Platform::WindowsX64 => "pyd",
|
||||
Platform::WindowsX64 | Platform::WindowsArm => "pyd",
|
||||
_ => "so",
|
||||
}
|
||||
),
|
||||
|
@ -64,13 +64,11 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
|
|||
BuildWheel {
|
||||
name: "anki",
|
||||
version: anki_version(),
|
||||
src_folder: "pylib/anki",
|
||||
gen_folder: "$builddir/pylib/anki",
|
||||
platform: overriden_python_target_platform().or(Some(build.host_platform)),
|
||||
platform: overriden_python_wheel_platform().or(Some(build.host_platform)),
|
||||
deps: inputs![
|
||||
":pylib:anki",
|
||||
glob!("pylib/anki/**"),
|
||||
"python/requirements.anki.in",
|
||||
"pylib/pyproject.toml"
|
||||
],
|
||||
},
|
||||
)?;
|
||||
|
|
|
@ -20,74 +20,66 @@ use ninja_gen::python::PythonTypecheck;
|
|||
use ninja_gen::rsync::RsyncFiles;
|
||||
use ninja_gen::Build;
|
||||
|
||||
// When updating Qt, make sure to update the .txt file in bundle.rs as well.
|
||||
/// Normalize version string by removing leading zeros from numeric parts
|
||||
/// while preserving pre-release markers (b1, rc2, a3, etc.)
|
||||
/// Normalize a version string by removing leading zeros from numeric parts
/// while preserving pre-release markers (b1, rc2, a3, etc.).
fn normalize_version(version: &str) -> String {
    version
        .split('.')
        .map(normalize_version_part)
        .collect::<Vec<_>>()
        .join(".")
}

/// Normalize one dotted component: strip leading zeros from its numeric
/// prefix, keeping any trailing pre-release marker untouched.
fn normalize_version_part(part: &str) -> String {
    // Index of the first non-digit character; the whole part if all digits.
    let boundary = part
        .find(|c: char| !c.is_ascii_digit())
        .unwrap_or(part.len());
    let (digits, marker) = part.split_at(boundary);
    if digits.is_empty() {
        // No numeric prefix (e.g. "beta"): leave unchanged.
        part.to_string()
    } else {
        // parse() drops leading zeros; overflow falls back to 0, matching
        // the original implementation's unwrap_or(0).
        format!("{}{}", digits.parse::<u32>().unwrap_or(0), marker)
    }
}
|
||||
|
||||
pub fn setup_venv(build: &mut Build) -> Result<()> {
|
||||
let platform_deps = if cfg!(windows) {
|
||||
inputs![
|
||||
"python/requirements.qt6_6.txt",
|
||||
"python/requirements.win.txt",
|
||||
]
|
||||
} else if cfg!(target_os = "macos") {
|
||||
inputs!["python/requirements.qt6_6.txt",]
|
||||
} else if std::env::var("PYTHONPATH").is_ok() {
|
||||
// assume we have a system-provided Qt
|
||||
inputs![]
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
inputs!["python/requirements.qt6_8.txt"]
|
||||
} else {
|
||||
inputs!["python/requirements.qt6_6.txt"]
|
||||
};
|
||||
let requirements_txt = inputs!["python/requirements.dev.txt", platform_deps];
|
||||
let extra_binary_exports = &[
|
||||
"mypy",
|
||||
"black",
|
||||
"isort",
|
||||
"pylint",
|
||||
"pytest",
|
||||
"protoc-gen-mypy",
|
||||
];
|
||||
build.add_action(
|
||||
"pyenv",
|
||||
PythonEnvironment {
|
||||
folder: "pyenv",
|
||||
base_requirements_txt: inputs!["python/requirements.base.txt"],
|
||||
requirements_txt,
|
||||
extra_binary_exports: &[
|
||||
"pip-compile",
|
||||
"pip-sync",
|
||||
"mypy",
|
||||
"black", // Required for offline build
|
||||
"isort",
|
||||
"pylint",
|
||||
"pytest",
|
||||
"protoc-gen-mypy", // ditto
|
||||
venv_folder: "pyenv",
|
||||
deps: inputs![
|
||||
"pyproject.toml",
|
||||
"pylib/pyproject.toml",
|
||||
"qt/pyproject.toml",
|
||||
"uv.lock"
|
||||
],
|
||||
},
|
||||
)?;
|
||||
|
||||
// optional venvs for testing other Qt versions
|
||||
let mut venv_reqs = inputs!["python/requirements.bundle.txt"];
|
||||
if cfg!(windows) {
|
||||
venv_reqs = inputs![venv_reqs, "python/requirements.win.txt"];
|
||||
}
|
||||
|
||||
build.add_action(
|
||||
"pyenv-qt6.8",
|
||||
PythonEnvironment {
|
||||
folder: "pyenv-qt6.8",
|
||||
base_requirements_txt: inputs!["python/requirements.base.txt"],
|
||||
requirements_txt: inputs![&venv_reqs, "python/requirements.qt6_8.txt"],
|
||||
extra_binary_exports: &[],
|
||||
},
|
||||
)?;
|
||||
build.add_action(
|
||||
"pyenv-qt5.15",
|
||||
PythonEnvironment {
|
||||
folder: "pyenv-qt5.15",
|
||||
base_requirements_txt: inputs!["python/requirements.base.txt"],
|
||||
requirements_txt: inputs![&venv_reqs, "python/requirements.qt5_15.txt"],
|
||||
extra_binary_exports: &[],
|
||||
},
|
||||
)?;
|
||||
build.add_action(
|
||||
"pyenv-qt5.14",
|
||||
PythonEnvironment {
|
||||
folder: "pyenv-qt5.14",
|
||||
base_requirements_txt: inputs!["python/requirements.base.txt"],
|
||||
requirements_txt: inputs![venv_reqs, "python/requirements.qt5_14.txt"],
|
||||
extra_binary_exports: &[],
|
||||
extra_args: "--all-packages --extra qt --extra audio",
|
||||
extra_binary_exports,
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@ -133,45 +125,59 @@ impl BuildAction for GenPythonProto {
|
|||
pub struct BuildWheel {
|
||||
pub name: &'static str,
|
||||
pub version: String,
|
||||
pub src_folder: &'static str,
|
||||
pub gen_folder: &'static str,
|
||||
pub platform: Option<Platform>,
|
||||
pub deps: BuildInput,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildWheel {
|
||||
fn command(&self) -> &str {
|
||||
"$pyenv_bin $script $src $gen $out"
|
||||
"$uv build --wheel --out-dir=$out_dir --project=$project_dir"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl FilesHandle) {
|
||||
build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]);
|
||||
build.add_inputs("script", inputs!["python/write_wheel.py"]);
|
||||
build.add_inputs("uv", inputs![":uv_binary"]);
|
||||
build.add_inputs("", &self.deps);
|
||||
build.add_variable("src", self.src_folder);
|
||||
build.add_variable("gen", self.gen_folder);
|
||||
|
||||
// Set the project directory based on which package we're building
|
||||
let project_dir = if self.name == "anki" { "pylib" } else { "qt" };
|
||||
build.add_variable("project_dir", project_dir);
|
||||
|
||||
// Set environment variable for uv to use our pyenv
|
||||
build.add_variable("pyenv_path", "$builddir/pyenv");
|
||||
build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
|
||||
|
||||
// Set output directory
|
||||
build.add_variable("out_dir", "$builddir/wheels/");
|
||||
|
||||
// Calculate the wheel filename that uv will generate
|
||||
let tag = if let Some(platform) = self.platform {
|
||||
let platform = match platform {
|
||||
Platform::LinuxX64 => "manylinux_2_35_x86_64",
|
||||
Platform::LinuxArm => "manylinux_2_35_aarch64",
|
||||
let platform_tag = match platform {
|
||||
Platform::LinuxX64 => "manylinux_2_36_x86_64",
|
||||
Platform::LinuxArm => "manylinux_2_36_aarch64",
|
||||
Platform::MacX64 => "macosx_12_0_x86_64",
|
||||
Platform::MacArm => "macosx_12_0_arm64",
|
||||
Platform::WindowsX64 => "win_amd64",
|
||||
Platform::WindowsArm => "win_arm64",
|
||||
};
|
||||
format!("cp39-abi3-{platform}")
|
||||
format!("cp39-abi3-{platform_tag}")
|
||||
} else {
|
||||
"py3-none-any".into()
|
||||
};
|
||||
|
||||
// Set environment variable for hatch_build.py to use the correct platform tag
|
||||
build.add_variable("wheel_tag", &tag);
|
||||
build.add_env_var("ANKI_WHEEL_TAG", "$wheel_tag");
|
||||
|
||||
let name = self.name;
|
||||
let version = &self.version;
|
||||
let wheel_path = format!("wheels/{name}-{version}-{tag}.whl");
|
||||
|
||||
let normalized_version = normalize_version(&self.version);
|
||||
|
||||
let wheel_path = format!("wheels/{name}-{normalized_version}-{tag}.whl");
|
||||
build.add_outputs("out", vec![wheel_path]);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check_python(build: &mut Build) -> Result<()> {
|
||||
python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?;
|
||||
python_format(build, "tools", inputs![glob!("tools/**/*.py")])?;
|
||||
|
||||
build.add_action(
|
||||
|
@ -183,7 +189,6 @@ pub fn check_python(build: &mut Build) -> Result<()> {
|
|||
"qt/tools",
|
||||
"out/pylib/anki",
|
||||
"out/qt/_aqt",
|
||||
"ftl",
|
||||
"python",
|
||||
"tools",
|
||||
],
|
||||
|
@ -262,8 +267,7 @@ struct Sphinx {
|
|||
impl BuildAction for Sphinx {
|
||||
fn command(&self) -> &str {
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
"$pip install sphinx sphinx_rtd_theme sphinx-autoapi \
|
||||
&& $python python/sphinx/build.py"
|
||||
"$uv sync --extra sphinx && $python python/sphinx/build.py"
|
||||
} else {
|
||||
"$python python/sphinx/build.py"
|
||||
}
|
||||
|
@ -271,7 +275,10 @@ impl BuildAction for Sphinx {
|
|||
|
||||
fn files(&mut self, build: &mut impl FilesHandle) {
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
build.add_inputs("pip", inputs![":pyenv:pip"]);
|
||||
build.add_inputs("uv", inputs![":uv_binary"]);
|
||||
// Set environment variable to use the existing pyenv
|
||||
build.add_variable("pyenv_path", "$builddir/pyenv");
|
||||
build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
|
||||
}
|
||||
build.add_inputs("python", inputs![":pyenv:bin"]);
|
||||
build.add_inputs("", &self.deps);
|
||||
|
@ -294,8 +301,35 @@ pub(crate) fn setup_sphinx(build: &mut Build) -> Result<()> {
|
|||
build.add_action(
|
||||
"python:sphinx",
|
||||
Sphinx {
|
||||
deps: inputs![":pylib", ":qt", ":python:sphinx:copy_conf"],
|
||||
deps: inputs![
|
||||
":pylib",
|
||||
":qt",
|
||||
":python:sphinx:copy_conf",
|
||||
"pyproject.toml"
|
||||
],
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_normalize_version_basic() {
|
||||
assert_eq!(normalize_version("1.2.3"), "1.2.3");
|
||||
assert_eq!(normalize_version("01.02.03"), "1.2.3");
|
||||
assert_eq!(normalize_version("1.0.0"), "1.0.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_version_with_prerelease() {
|
||||
assert_eq!(normalize_version("1.2.3b1"), "1.2.3b1");
|
||||
assert_eq!(normalize_version("01.02.03b1"), "1.2.3b1");
|
||||
assert_eq!(normalize_version("1.0.0rc2"), "1.0.0rc2");
|
||||
assert_eq!(normalize_version("2.1.0a3"), "2.1.0a3");
|
||||
assert_eq!(normalize_version("1.2.3beta1"), "1.2.3beta1");
|
||||
assert_eq!(normalize_version("1.2.3alpha1"), "1.2.3alpha1");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -154,7 +154,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
|
|||
"$builddir/buildhash",
|
||||
// building on Windows requires python3.lib
|
||||
if cfg!(windows) {
|
||||
inputs![":extract:python"]
|
||||
inputs![":pyenv:bin"]
|
||||
} else {
|
||||
inputs![]
|
||||
}
|
||||
|
@ -247,7 +247,7 @@ pub fn check_minilints(build: &mut Build) -> Result<()> {
|
|||
let files = inputs![
|
||||
glob![
|
||||
"**/*.{py,rs,ts,svelte,mjs,md}",
|
||||
"{node_modules,qt/bundle/PyOxidizer,ts/.svelte-kit}/**"
|
||||
"{node_modules,ts/.svelte-kit}/**"
|
||||
],
|
||||
"Cargo.lock"
|
||||
];
|
||||
|
|
|
@ -16,5 +16,22 @@ globset.workspace = true
|
|||
itertools.workspace = true
|
||||
maplit.workspace = true
|
||||
num_cpus.workspace = true
|
||||
regex.workspace = true
|
||||
serde_json.workspace = true
|
||||
sha2.workspace = true
|
||||
walkdir.workspace = true
|
||||
which.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
reqwest = { workspace = true, features = ["blocking", "json", "native-tls"] }
|
||||
|
||||
[target.'cfg(not(windows))'.dependencies]
|
||||
reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
|
||||
|
||||
[[bin]]
|
||||
name = "update_uv"
|
||||
path = "src/bin/update_uv.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "update_protoc"
|
||||
path = "src/bin/update_protoc.rs"
|
||||
|
|
|
@ -26,22 +26,21 @@ pub enum Platform {
|
|||
MacX64,
|
||||
MacArm,
|
||||
WindowsX64,
|
||||
WindowsArm,
|
||||
}
|
||||
|
||||
impl Platform {
|
||||
pub fn current() -> Self {
|
||||
if cfg!(windows) {
|
||||
Self::WindowsX64
|
||||
} else {
|
||||
let os = std::env::consts::OS;
|
||||
let arch = std::env::consts::ARCH;
|
||||
match (os, arch) {
|
||||
("linux", "x86_64") => Self::LinuxX64,
|
||||
("linux", "aarch64") => Self::LinuxArm,
|
||||
("macos", "x86_64") => Self::MacX64,
|
||||
("macos", "aarch64") => Self::MacArm,
|
||||
_ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
|
||||
}
|
||||
let os = std::env::consts::OS;
|
||||
let arch = std::env::consts::ARCH;
|
||||
match (os, arch) {
|
||||
("linux", "x86_64") => Self::LinuxX64,
|
||||
("linux", "aarch64") => Self::LinuxArm,
|
||||
("macos", "x86_64") => Self::MacX64,
|
||||
("macos", "aarch64") => Self::MacArm,
|
||||
("windows", "x86_64") => Self::WindowsX64,
|
||||
("windows", "aarch64") => Self::WindowsArm,
|
||||
_ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -62,6 +61,7 @@ impl Platform {
|
|||
Platform::MacX64 => "x86_64-apple-darwin",
|
||||
Platform::MacArm => "aarch64-apple-darwin",
|
||||
Platform::WindowsX64 => "x86_64-pc-windows-msvc",
|
||||
Platform::WindowsArm => "aarch64-pc-windows-msvc",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
126
build/ninja_gen/src/bin/update_protoc.rs
Normal file
126
build/ninja_gen/src/bin/update_protoc.rs
Normal file
|
@ -0,0 +1,126 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::error::Error;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use regex::Regex;
|
||||
use reqwest::blocking::Client;
|
||||
use serde_json::Value;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
|
||||
fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
|
||||
let client = Client::new();
|
||||
|
||||
println!("Fetching latest protoc release info from GitHub...");
|
||||
// Fetch latest release info
|
||||
let response = client
|
||||
.get("https://api.github.com/repos/protocolbuffers/protobuf/releases/latest")
|
||||
.header("User-Agent", "Anki-Build-Script")
|
||||
.send()?;
|
||||
|
||||
let release_info: Value = response.json()?;
|
||||
let assets = release_info["assets"]
|
||||
.as_array()
|
||||
.expect("assets should be an array");
|
||||
|
||||
// Map platform names to their corresponding asset patterns
|
||||
let platform_patterns = [
|
||||
("LinuxX64", "linux-x86_64"),
|
||||
("LinuxArm", "linux-aarch_64"),
|
||||
("MacX64", "osx-universal_binary"), // Mac uses universal binary for both
|
||||
("MacArm", "osx-universal_binary"),
|
||||
("WindowsX64", "win64"), // Windows uses x86 binary for both archs
|
||||
("WindowsArm", "win64"),
|
||||
];
|
||||
|
||||
let mut match_blocks = Vec::new();
|
||||
|
||||
for (platform, pattern) in platform_patterns {
|
||||
// Find the asset matching the platform pattern
|
||||
let asset = assets.iter().find(|asset| {
|
||||
let name = asset["name"].as_str().unwrap_or("");
|
||||
name.starts_with("protoc-") && name.contains(pattern) && name.ends_with(".zip")
|
||||
});
|
||||
|
||||
if asset.is_none() {
|
||||
eprintln!("No asset found for platform {platform} pattern {pattern}");
|
||||
continue;
|
||||
}
|
||||
|
||||
let asset = asset.unwrap();
|
||||
let download_url = asset["browser_download_url"].as_str().unwrap();
|
||||
let asset_name = asset["name"].as_str().unwrap();
|
||||
|
||||
// Download the file and calculate SHA256 locally
|
||||
println!("Downloading and checksumming {asset_name} for {platform}...");
|
||||
let response = client
|
||||
.get(download_url)
|
||||
.header("User-Agent", "Anki-Build-Script")
|
||||
.send()?;
|
||||
|
||||
let bytes = response.bytes()?;
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(&bytes);
|
||||
let sha256 = format!("{:x}", hasher.finalize());
|
||||
|
||||
// Handle platform-specific match patterns
|
||||
let match_pattern = match platform {
|
||||
"MacX64" => "Platform::MacX64 | Platform::MacArm",
|
||||
"MacArm" => continue, // Skip MacArm since it's handled with MacX64
|
||||
"WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
|
||||
"WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
|
||||
_ => &format!("Platform::{}", platform),
|
||||
};
|
||||
|
||||
match_blocks.push(format!(
|
||||
" {} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}",
|
||||
match_pattern, download_url, sha256
|
||||
));
|
||||
}
|
||||
|
||||
Ok(format!(
|
||||
"pub fn protoc_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}",
|
||||
match_blocks.join(",\n")
|
||||
))
|
||||
}
|
||||
|
||||
/// Read src/protobuf.rs relative to this crate's manifest directory.
fn read_protobuf_rs() -> Result<String, Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
    println!("Reading {}", path.display());
    Ok(fs::read_to_string(path)?)
}
|
||||
|
||||
fn update_protoc_text(old_text: &str, new_protoc_text: &str) -> Result<String, Box<dyn Error>> {
|
||||
let re =
|
||||
Regex::new(r"(?ms)^pub fn protoc_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\}")
|
||||
.unwrap();
|
||||
if !re.is_match(old_text) {
|
||||
return Err("Could not find protoc_archive function block to replace".into());
|
||||
}
|
||||
let new_content = re.replace(old_text, new_protoc_text).to_string();
|
||||
println!("Original lines: {}", old_text.lines().count());
|
||||
println!("Updated lines: {}", new_content.lines().count());
|
||||
Ok(new_content)
|
||||
}
|
||||
|
||||
/// Write the updated contents back to src/protobuf.rs.
fn write_protobuf_rs(content: &str) -> Result<(), Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
    println!("Writing to {}", path.display());
    fs::write(path, content)?;
    Ok(())
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
let new_protoc_archive = fetch_protoc_release_info()?;
|
||||
let content = read_protobuf_rs()?;
|
||||
let updated_content = update_protoc_text(&content, &new_protoc_archive)?;
|
||||
write_protobuf_rs(&updated_content)?;
|
||||
println!("Successfully updated protoc_archive function in protobuf.rs");
|
||||
Ok(())
|
||||
}
|
144
build/ninja_gen/src/bin/update_uv.rs
Normal file
144
build/ninja_gen/src/bin/update_uv.rs
Normal file
|
@ -0,0 +1,144 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::error::Error;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use regex::Regex;
|
||||
use reqwest::blocking::Client;
|
||||
use serde_json::Value;
|
||||
|
||||
fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
|
||||
let client = Client::new();
|
||||
|
||||
println!("Fetching latest uv release info from GitHub...");
|
||||
// Fetch latest release info
|
||||
let response = client
|
||||
.get("https://api.github.com/repos/astral-sh/uv/releases/latest")
|
||||
.header("User-Agent", "Anki-Build-Script")
|
||||
.send()?;
|
||||
|
||||
let release_info: Value = response.json()?;
|
||||
let assets = release_info["assets"]
|
||||
.as_array()
|
||||
.expect("assets should be an array");
|
||||
|
||||
// Map platform names to their corresponding asset patterns
|
||||
let platform_patterns = [
|
||||
("LinuxX64", "x86_64-unknown-linux-gnu"),
|
||||
("LinuxArm", "aarch64-unknown-linux-gnu"),
|
||||
("MacX64", "x86_64-apple-darwin"),
|
||||
("MacArm", "aarch64-apple-darwin"),
|
||||
("WindowsX64", "x86_64-pc-windows-msvc"),
|
||||
("WindowsArm", "aarch64-pc-windows-msvc"),
|
||||
];
|
||||
|
||||
let mut match_blocks = Vec::new();
|
||||
|
||||
for (platform, pattern) in platform_patterns {
|
||||
// Find the asset matching the platform pattern (the binary)
|
||||
let asset = assets.iter().find(|asset| {
|
||||
let name = asset["name"].as_str().unwrap_or("");
|
||||
name.contains(pattern) && (name.ends_with(".tar.gz") || name.ends_with(".zip"))
|
||||
});
|
||||
if asset.is_none() {
|
||||
eprintln!("No asset found for platform {platform} pattern {pattern}");
|
||||
continue;
|
||||
}
|
||||
let asset = asset.unwrap();
|
||||
let download_url = asset["browser_download_url"].as_str().unwrap();
|
||||
let asset_name = asset["name"].as_str().unwrap();
|
||||
|
||||
// Find the corresponding .sha256 or .sha256sum asset
|
||||
let sha_asset = assets.iter().find(|a| {
|
||||
let name = a["name"].as_str().unwrap_or("");
|
||||
name == format!("{}.sha256", asset_name) || name == format!("{}.sha256sum", asset_name)
|
||||
});
|
||||
if sha_asset.is_none() {
|
||||
eprintln!("No sha256 asset found for {asset_name}");
|
||||
continue;
|
||||
}
|
||||
let sha_asset = sha_asset.unwrap();
|
||||
let sha_url = sha_asset["browser_download_url"].as_str().unwrap();
|
||||
println!("Fetching SHA256 for {platform}...");
|
||||
let sha_text = client
|
||||
.get(sha_url)
|
||||
.header("User-Agent", "Anki-Build-Script")
|
||||
.send()?
|
||||
.text()?;
|
||||
// The sha file is usually of the form: "<sha256> <filename>"
|
||||
let sha256 = sha_text.split_whitespace().next().unwrap_or("");
|
||||
|
||||
match_blocks.push(format!(
|
||||
" Platform::{} => {{\n OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }}\n }}",
|
||||
platform, download_url, sha256
|
||||
));
|
||||
}
|
||||
|
||||
Ok(format!(
|
||||
"pub fn uv_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}",
|
||||
match_blocks.join(",\n")
|
||||
))
|
||||
}
|
||||
|
||||
/// Read src/python.rs relative to this crate's manifest directory.
fn read_python_rs() -> Result<String, Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/python.rs");
    println!("Reading {}", path.display());
    Ok(fs::read_to_string(path)?)
}
|
||||
|
||||
fn update_uv_text(old_text: &str, new_uv_text: &str) -> Result<String, Box<dyn Error>> {
|
||||
let re = Regex::new(r"(?ms)^pub fn uv_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}\s*\n\s*\}").unwrap();
|
||||
if !re.is_match(old_text) {
|
||||
return Err("Could not find uv_archive function block to replace".into());
|
||||
}
|
||||
let new_content = re.replace(old_text, new_uv_text).to_string();
|
||||
println!("Original lines: {}", old_text.lines().count());
|
||||
println!("Updated lines: {}", new_content.lines().count());
|
||||
Ok(new_content)
|
||||
}
|
||||
|
||||
/// Write the updated contents back to src/python.rs.
fn write_python_rs(content: &str) -> Result<(), Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/python.rs");
    println!("Writing to {}", path.display());
    fs::write(path, content)?;
    Ok(())
}
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
let new_uv_archive = fetch_uv_release_info()?;
|
||||
let content = read_python_rs()?;
|
||||
let updated_content = update_uv_text(&content, &new_uv_archive)?;
|
||||
write_python_rs(&updated_content)?;
|
||||
println!("Successfully updated uv_archive function in python.rs");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Running the replacement with empty text must strip exactly the
    /// current uv_archive block from the real python.rs.
    #[test]
    fn test_update_uv_text_with_actual_file() {
        let source = fs::read_to_string("src/python.rs").unwrap();
        let before = source.lines().count();

        // Current size of the uv_archive block the regex removes.
        const EXPECTED_LINES_REMOVED: usize = 38;

        let stripped = update_uv_text(&source, "").unwrap();
        let after = stripped.lines().count();

        assert_eq!(
            after,
            before - EXPECTED_LINES_REMOVED,
            "Expected line count to decrease by exactly {} lines (original: {}, updated: {})",
            EXPECTED_LINES_REMOVED,
            before,
            after
        );
    }
}
|
|
@ -162,7 +162,7 @@ impl BuildAction for CargoTest {
|
|||
"cargo-nextest",
|
||||
CargoInstall {
|
||||
binary_name: "cargo-nextest",
|
||||
args: "cargo-nextest --version 0.9.57 --locked",
|
||||
args: "cargo-nextest --version 0.9.99 --locked --no-default-features --features default-no-update",
|
||||
},
|
||||
)?;
|
||||
setup_flags(build)
|
||||
|
|
|
@ -38,6 +38,10 @@ pub fn node_archive(platform: Platform) -> OnlineArchive {
|
|||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
|
||||
sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
|
||||
},
|
||||
Platform::WindowsArm => OnlineArchive {
|
||||
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip",
|
||||
sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,26 +21,26 @@ pub fn protoc_archive(platform: Platform) -> OnlineArchive {
|
|||
match platform {
|
||||
Platform::LinuxX64 => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip",
|
||||
sha256: "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a",
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-x86_64.zip",
|
||||
sha256: "96553041f1a91ea0efee963cb16f462f5985b4d65365f3907414c360044d8065",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::LinuxArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip",
|
||||
sha256: "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad",
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-aarch_64.zip",
|
||||
sha256: "6c554de11cea04c56ebf8e45b54434019b1cd85223d4bbd25c282425e306ecc2",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::MacX64 | Platform::MacArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip",
|
||||
sha256: "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c",
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-osx-universal_binary.zip",
|
||||
sha256: "99ea004549c139f46da5638187a85bbe422d78939be0fa01af1aa8ab672e395f",
|
||||
}
|
||||
}
|
||||
Platform::WindowsX64 => {
|
||||
},
|
||||
Platform::WindowsX64 | Platform::WindowsArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip",
|
||||
sha256: "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec",
|
||||
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-win64.zip",
|
||||
sha256: "70381b116ab0d71cb6a5177d9b17c7c13415866603a0fd40d513dafe32d56c35",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -67,7 +67,7 @@ fn clang_format_archive(platform: Platform) -> OnlineArchive {
|
|||
sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e",
|
||||
}
|
||||
}
|
||||
Platform::WindowsX64 => {
|
||||
Platform::WindowsX64 | Platform::WindowsArm=> {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip",
|
||||
sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc",
|
||||
|
|
|
@ -9,6 +9,7 @@ use maplit::hashmap;
|
|||
|
||||
use crate::action::BuildAction;
|
||||
use crate::archives::download_and_extract;
|
||||
use crate::archives::with_exe;
|
||||
use crate::archives::OnlineArchive;
|
||||
use crate::archives::Platform;
|
||||
use crate::hash::simple_hash;
|
||||
|
@ -16,82 +17,113 @@ use crate::input::BuildInput;
|
|||
use crate::inputs;
|
||||
use crate::Build;
|
||||
|
||||
/// When updating this, pyoxidizer.bzl needs updating too, but it uses different
|
||||
/// files.
|
||||
pub fn python_archive(platform: Platform) -> OnlineArchive {
|
||||
// To update, run 'cargo run --bin update_uv'.
|
||||
// You'll need to do this when bumping Python versions, as uv bakes in
|
||||
// the latest known version.
|
||||
// When updating Python version, make sure to update version tag in BuildWheel
|
||||
// too.
|
||||
pub fn uv_archive(platform: Platform) -> OnlineArchive {
|
||||
match platform {
|
||||
Platform::LinuxX64 => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-install_only.tar.gz",
|
||||
sha256: "9426bca501ae0a257392b10719e2e20ff5fa5e22a3ce4599d6ad0b3139f86417",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
sha256: "909278eb197c5ed0e9b5f16317d1255270d1f9ea4196e7179ce934d48c4c2545",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::LinuxArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-unknown-linux-gnu-install_only.tar.gz",
|
||||
sha256: "7d19e1ecd6e582423f7c74a0c67491eaa982ce9d5c5f35f0e4289f83127abcb8",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-unknown-linux-gnu.tar.gz",
|
||||
sha256: "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::MacX64 => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-install_only.tar.gz",
|
||||
sha256: "5a0bf895a5cb08d6d008140abb41bb2c8cd638a665273f7d8eb258bc89de439b",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-apple-darwin.tar.gz",
|
||||
sha256: "d785753ac092e25316180626aa691c5dfe1fb075290457ba4fdb72c7c5661321",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::MacArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-install_only.tar.gz",
|
||||
sha256: "bf0cd90204a2cc6da48cae1e4b32f48c9f7031fbe1238c5972104ccb0155d368",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz",
|
||||
sha256: "721f532b73171586574298d4311a91d5ea2c802ef4db3ebafc434239330090c6",
|
||||
}
|
||||
}
|
||||
},
|
||||
Platform::WindowsX64 => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
|
||||
sha256: "8f0544cd593984f7ecb90c685931249c579302124b9821064873f3a14ed07005",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-pc-windows-msvc.zip",
|
||||
sha256: "e199b10bef1a7cc540014483e7f60f825a174988f41020e9d2a6b01bd60f0669",
|
||||
}
|
||||
},
|
||||
Platform::WindowsArm => {
|
||||
OnlineArchive {
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-pc-windows-msvc.zip",
|
||||
sha256: "bb40708ad549ad6a12209cb139dd751bf0ede41deb679ce7513ce197bd9ef234",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the Python binary, which can be used to create venvs.
|
||||
/// Downloads if missing.
|
||||
pub fn setup_python(build: &mut Build) -> Result<()> {
|
||||
// if changing this, make sure you remove out/pyenv
|
||||
let python_binary = match env::var("PYTHON_BINARY") {
|
||||
pub fn setup_uv(build: &mut Build, platform: Platform) -> Result<()> {
|
||||
let uv_binary = match env::var("UV_BINARY") {
|
||||
Ok(path) => {
|
||||
assert!(
|
||||
Utf8Path::new(&path).is_absolute(),
|
||||
"PYTHON_BINARY must be absolute"
|
||||
"UV_BINARY must be absolute"
|
||||
);
|
||||
path.into()
|
||||
}
|
||||
Err(_) => {
|
||||
download_and_extract(
|
||||
build,
|
||||
"python",
|
||||
python_archive(build.host_platform),
|
||||
"uv",
|
||||
uv_archive(platform),
|
||||
hashmap! { "bin" => [
|
||||
if cfg!(windows) { "python.exe" } else { "bin/python3"}
|
||||
] },
|
||||
with_exe("uv")
|
||||
] },
|
||||
)?;
|
||||
inputs![":extract:python:bin"]
|
||||
inputs![":extract:uv:bin"]
|
||||
}
|
||||
};
|
||||
build.add_dependency("python_binary", python_binary);
|
||||
build.add_dependency("uv_binary", uv_binary);
|
||||
|
||||
// Our macOS packaging needs access to the x86 binary on ARM.
|
||||
if cfg!(target_arch = "aarch64") {
|
||||
download_and_extract(
|
||||
build,
|
||||
"uv_mac_x86",
|
||||
uv_archive(Platform::MacX64),
|
||||
hashmap! { "bin" => [
|
||||
with_exe("uv")
|
||||
] },
|
||||
)?;
|
||||
}
|
||||
// Our Linux packaging needs access to the ARM binary on x86
|
||||
if cfg!(target_arch = "x86_64") {
|
||||
download_and_extract(
|
||||
build,
|
||||
"uv_lin_arm",
|
||||
uv_archive(Platform::LinuxArm),
|
||||
hashmap! { "bin" => [
|
||||
with_exe("uv")
|
||||
] },
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub struct PythonEnvironment {
|
||||
pub folder: &'static str,
|
||||
pub base_requirements_txt: BuildInput,
|
||||
pub requirements_txt: BuildInput,
|
||||
pub deps: BuildInput,
|
||||
// todo: rename
|
||||
pub venv_folder: &'static str,
|
||||
pub extra_args: &'static str,
|
||||
pub extra_binary_exports: &'static [&'static str],
|
||||
}
|
||||
|
||||
impl BuildAction for PythonEnvironment {
|
||||
fn command(&self) -> &str {
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
"$runner pyenv $python_binary $builddir/$pyenv_folder $system_pkgs $base_requirements $requirements"
|
||||
"$runner pyenv $uv_binary $builddir/$pyenv_folder -- $extra_args"
|
||||
} else {
|
||||
"echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
|
||||
}
|
||||
|
@ -99,7 +131,7 @@ impl BuildAction for PythonEnvironment {
|
|||
|
||||
fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
|
||||
let bin_path = |binary: &str| -> Vec<String> {
|
||||
let folder = self.folder;
|
||||
let folder = self.venv_folder;
|
||||
let path = if cfg!(windows) {
|
||||
format!("{folder}/scripts/{binary}.exe")
|
||||
} else {
|
||||
|
@ -108,21 +140,24 @@ impl BuildAction for PythonEnvironment {
|
|||
vec![path]
|
||||
};
|
||||
|
||||
build.add_inputs("", &self.deps);
|
||||
build.add_variable("pyenv_folder", self.venv_folder);
|
||||
if env::var("OFFLINE_BUILD").is_err() {
|
||||
build.add_inputs("python_binary", inputs![":python_binary"]);
|
||||
build.add_variable("pyenv_folder", self.folder);
|
||||
build.add_inputs("base_requirements", &self.base_requirements_txt);
|
||||
build.add_inputs("requirements", &self.requirements_txt);
|
||||
build.add_outputs_ext("pip", bin_path("pip"), true);
|
||||
build.add_inputs("uv_binary", inputs![":uv_binary"]);
|
||||
|
||||
// Add --python flag to extra_args if PYTHON_BINARY is set
|
||||
let mut args = self.extra_args.to_string();
|
||||
if let Ok(python_binary) = env::var("PYTHON_BINARY") {
|
||||
args = format!("--python {} {}", python_binary, args);
|
||||
}
|
||||
build.add_variable("extra_args", args);
|
||||
}
|
||||
|
||||
build.add_outputs_ext("bin", bin_path("python"), true);
|
||||
for binary in self.extra_binary_exports {
|
||||
build.add_outputs_ext(*binary, bin_path(binary), true);
|
||||
}
|
||||
}
|
||||
|
||||
fn check_output_timestamps(&self) -> bool {
|
||||
true
|
||||
build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -15,7 +15,6 @@ camino.workspace = true
|
|||
clap.workspace = true
|
||||
flate2.workspace = true
|
||||
junction.workspace = true
|
||||
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
|
||||
sha2.workspace = true
|
||||
tar.workspace = true
|
||||
termcolor.workspace = true
|
||||
|
@ -24,3 +23,9 @@ which.workspace = true
|
|||
xz2.workspace = true
|
||||
zip.workspace = true
|
||||
zstd.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
reqwest = { workspace = true, features = ["native-tls"] }
|
||||
|
||||
[target.'cfg(not(windows))'.dependencies]
|
||||
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
|
||||
|
|
|
@ -1,62 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
|
||||
use camino::Utf8PathBuf;
|
||||
use clap::Args;
|
||||
|
||||
use crate::run::run_command;
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
pub struct BuildArtifactsArgs {
|
||||
bundle_root: Utf8PathBuf,
|
||||
pyoxidizer_bin: String,
|
||||
}
|
||||
|
||||
pub fn build_artifacts(args: BuildArtifactsArgs) {
|
||||
// build.rs doesn't declare inputs from venv, so we need to force a rebuild to
|
||||
// ensure changes to our libs/the venv get included
|
||||
let artifacts = args.bundle_root.join("artifacts");
|
||||
if artifacts.exists() {
|
||||
fs::remove_dir_all(&artifacts).unwrap();
|
||||
}
|
||||
let bundle_root = args.bundle_root.canonicalize_utf8().unwrap();
|
||||
let build_folder = bundle_root.join("build");
|
||||
if build_folder.exists() {
|
||||
fs::remove_dir_all(&build_folder).unwrap();
|
||||
}
|
||||
|
||||
run_command(
|
||||
Command::new(&args.pyoxidizer_bin)
|
||||
.args([
|
||||
"--system-rust",
|
||||
"run-build-script",
|
||||
"qt/bundle/build.rs",
|
||||
"--var",
|
||||
"venv",
|
||||
"out/bundle/pyenv",
|
||||
"--var",
|
||||
"build",
|
||||
build_folder.as_str(),
|
||||
])
|
||||
.env("CARGO_MANIFEST_DIR", "qt/bundle")
|
||||
.env("CARGO_TARGET_DIR", "out/bundle/rust")
|
||||
.env("PROFILE", "release")
|
||||
.env("OUT_DIR", &artifacts)
|
||||
.env("TARGET", env!("TARGET"))
|
||||
.env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
|
||||
.env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
|
||||
.env("CARGO_BUILD_TARGET", env!("TARGET")),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn macos_deployment_target() -> &'static str {
|
||||
if env!("TARGET") == "x86_64-apple-darwin" {
|
||||
"10.13.4"
|
||||
} else {
|
||||
"11"
|
||||
}
|
||||
}
|
|
@ -1,53 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::process::Command;
|
||||
|
||||
use anki_process::CommandExt;
|
||||
use camino::Utf8Path;
|
||||
use camino::Utf8PathBuf;
|
||||
|
||||
use super::artifacts::macos_deployment_target;
|
||||
use crate::run::run_command;
|
||||
|
||||
pub fn build_bundle_binary() {
|
||||
let mut features = String::from("build-mode-prebuilt-artifacts");
|
||||
if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
|
||||
features.push_str(",global-allocator-jemalloc,allocator-jemalloc");
|
||||
}
|
||||
|
||||
let mut command = Command::new("cargo");
|
||||
command
|
||||
.args([
|
||||
"build",
|
||||
"--manifest-path=qt/bundle/Cargo.toml",
|
||||
"--target-dir=out/bundle/rust",
|
||||
"--release",
|
||||
"--no-default-features",
|
||||
])
|
||||
.arg(format!("--features={features}"))
|
||||
.env(
|
||||
"DEFAULT_PYTHON_CONFIG_RS",
|
||||
// included in main.rs, so relative to qt/bundle/src
|
||||
"../../../out/bundle/artifacts/",
|
||||
)
|
||||
.env(
|
||||
"PYO3_CONFIG_FILE",
|
||||
Utf8Path::new("out/bundle/artifacts/pyo3-build-config-file.txt")
|
||||
.canonicalize_utf8()
|
||||
.unwrap(),
|
||||
)
|
||||
.env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
|
||||
.env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
|
||||
.env("CARGO_BUILD_TARGET", env!("TARGET"));
|
||||
if env!("TARGET") == "x86_64-apple-darwin" {
|
||||
let xcode_path = Command::run_with_output(["xcode-select", "-p"]).unwrap();
|
||||
let ld_classic = Utf8PathBuf::from(xcode_path.stdout.trim())
|
||||
.join("Toolchains/XcodeDefault.xctoolchain/usr/bin/ld-classic");
|
||||
if ld_classic.exists() {
|
||||
// work around XCode 15's default linker not supporting macOS 10.15-12.
|
||||
command.env("RUSTFLAGS", format!("-Clink-arg=-fuse-ld={ld_classic}"));
|
||||
}
|
||||
}
|
||||
run_command(&mut command);
|
||||
}
|
|
@ -1,156 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
|
||||
use camino::Utf8Path;
|
||||
use camino::Utf8PathBuf;
|
||||
use clap::Args;
|
||||
use clap::ValueEnum;
|
||||
|
||||
use crate::paths::absolute_msys_path;
|
||||
use crate::paths::unix_path;
|
||||
use crate::run::run_command;
|
||||
|
||||
#[derive(Clone, Copy, ValueEnum, Debug)]
|
||||
enum DistKind {
|
||||
Standard,
|
||||
Alternate,
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
pub struct BuildDistFolderArgs {
|
||||
kind: DistKind,
|
||||
folder_root: Utf8PathBuf,
|
||||
}
|
||||
|
||||
pub fn build_dist_folder(args: BuildDistFolderArgs) {
|
||||
let BuildDistFolderArgs { kind, folder_root } = args;
|
||||
fs::create_dir_all(&folder_root).unwrap();
|
||||
// Start with Qt, as it's the largest, and we use --delete to ensure there are
|
||||
// no stale files in lib/. Skipped on macOS as Qt is handled later.
|
||||
if !cfg!(target_os = "macos") {
|
||||
copy_qt_from_venv(kind, &folder_root);
|
||||
}
|
||||
clean_top_level_files(&folder_root);
|
||||
copy_binary_and_pylibs(&folder_root);
|
||||
if cfg!(target_os = "linux") {
|
||||
copy_linux_extras(kind, &folder_root);
|
||||
} else if cfg!(windows) {
|
||||
copy_windows_extras(&folder_root);
|
||||
}
|
||||
fs::write(folder_root.with_extension("stamp"), b"").unwrap();
|
||||
}
|
||||
|
||||
fn copy_qt_from_venv(kind: DistKind, folder_root: &Utf8Path) {
|
||||
let python39 = if cfg!(windows) { "" } else { "python3.9/" };
|
||||
let qt_root = match kind {
|
||||
DistKind::Standard => {
|
||||
folder_root.join(format!("../pyenv/lib/{python39}site-packages/PyQt6"))
|
||||
}
|
||||
DistKind::Alternate => {
|
||||
folder_root.join(format!("../pyenv-qt5/lib/{python39}site-packages/PyQt5"))
|
||||
}
|
||||
};
|
||||
let src_path = absolute_msys_path(&qt_root);
|
||||
let lib_path = folder_root.join("lib");
|
||||
fs::create_dir_all(&lib_path).unwrap();
|
||||
let dst_path = with_slash(absolute_msys_path(&lib_path));
|
||||
run_command(Command::new("rsync").args([
|
||||
"-a",
|
||||
"--delete",
|
||||
"--exclude-from",
|
||||
"qt/bundle/qt.exclude",
|
||||
&src_path,
|
||||
&dst_path,
|
||||
]));
|
||||
}
|
||||
|
||||
fn copy_linux_extras(kind: DistKind, folder_root: &Utf8Path) {
|
||||
// add README, installer, etc
|
||||
run_command(Command::new("rsync").args(["-a", "qt/bundle/lin/", &with_slash(folder_root)]));
|
||||
|
||||
// add extra IME plugins from download
|
||||
let lib_path = folder_root.join("lib");
|
||||
let src_path = folder_root
|
||||
.join("../../extracted/linux_qt_plugins")
|
||||
.join(match kind {
|
||||
DistKind::Standard => "qt6",
|
||||
DistKind::Alternate => "qt5",
|
||||
});
|
||||
let dst_path = lib_path.join(match kind {
|
||||
DistKind::Standard => "PyQt6/Qt6/plugins",
|
||||
DistKind::Alternate => "PyQt5/Qt5/plugins",
|
||||
});
|
||||
run_command(Command::new("rsync").args(["-a", &with_slash(src_path), &with_slash(dst_path)]));
|
||||
}
|
||||
|
||||
fn copy_windows_extras(folder_root: &Utf8Path) {
|
||||
run_command(Command::new("rsync").args([
|
||||
"-a",
|
||||
"out/extracted/win_amd64_audio/",
|
||||
&with_slash(folder_root),
|
||||
]));
|
||||
}
|
||||
|
||||
fn clean_top_level_files(folder_root: &Utf8Path) {
|
||||
let mut to_remove = vec![];
|
||||
for entry in fs::read_dir(folder_root).unwrap() {
|
||||
let entry = entry.unwrap();
|
||||
if entry.file_name() == "lib" {
|
||||
continue;
|
||||
} else {
|
||||
to_remove.push(entry.path());
|
||||
}
|
||||
}
|
||||
for path in to_remove {
|
||||
if path.is_dir() {
|
||||
fs::remove_dir_all(path).unwrap()
|
||||
} else {
|
||||
fs::remove_file(path).unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn with_slash<P>(path: P) -> String
|
||||
where
|
||||
P: AsRef<str>,
|
||||
{
|
||||
format!("{}/", path.as_ref())
|
||||
}
|
||||
|
||||
fn copy_binary_and_pylibs(folder_root: &Utf8Path) {
|
||||
let binary = folder_root
|
||||
.join("../rust")
|
||||
.join(env!("TARGET"))
|
||||
.join("release")
|
||||
.join(if cfg!(windows) { "anki.exe" } else { "anki" });
|
||||
let extra_files = folder_root
|
||||
.join("../build")
|
||||
.join(env!("TARGET"))
|
||||
.join("release/resources/extra_files");
|
||||
run_command(Command::new("rsync").args([
|
||||
"-a",
|
||||
"--exclude",
|
||||
"PyQt6",
|
||||
// misleading, as it misses the GPL PyQt, and our Rust/JS
|
||||
// dependencies
|
||||
"--exclude",
|
||||
"COPYING.txt",
|
||||
&unix_path(&binary),
|
||||
&with_slash(unix_path(&extra_files)),
|
||||
&with_slash(unix_path(folder_root)),
|
||||
]));
|
||||
let google_py = if cfg!(windows) {
|
||||
folder_root.join("../pyenv/lib/site-packages/google")
|
||||
} else {
|
||||
folder_root.join("../pyenv/lib/python3.9/site-packages/google")
|
||||
};
|
||||
run_command(Command::new("rsync").args([
|
||||
"-a",
|
||||
&unix_path(&google_py),
|
||||
&with_slash(unix_path(&folder_root.join("lib"))),
|
||||
]));
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
pub mod artifacts;
|
||||
pub mod binary;
|
||||
pub mod folder;
|
|
@ -7,7 +7,6 @@
|
|||
|
||||
mod archive;
|
||||
mod build;
|
||||
mod bundle;
|
||||
mod paths;
|
||||
mod pyenv;
|
||||
mod rsync;
|
||||
|
@ -19,11 +18,6 @@ use archive::archive_command;
|
|||
use archive::ArchiveArgs;
|
||||
use build::run_build;
|
||||
use build::BuildArgs;
|
||||
use bundle::artifacts::build_artifacts;
|
||||
use bundle::artifacts::BuildArtifactsArgs;
|
||||
use bundle::binary::build_bundle_binary;
|
||||
use bundle::folder::build_dist_folder;
|
||||
use bundle::folder::BuildDistFolderArgs;
|
||||
use clap::Parser;
|
||||
use clap::Subcommand;
|
||||
use pyenv::setup_pyenv;
|
||||
|
@ -48,9 +42,6 @@ enum Command {
|
|||
Rsync(RsyncArgs),
|
||||
Run(RunArgs),
|
||||
Build(BuildArgs),
|
||||
BuildArtifacts(BuildArtifactsArgs),
|
||||
BuildBundleBinary,
|
||||
BuildDistFolder(BuildDistFolderArgs),
|
||||
#[clap(subcommand)]
|
||||
Archive(ArchiveArgs),
|
||||
}
|
||||
|
@ -62,9 +53,6 @@ fn main() -> Result<()> {
|
|||
Command::Rsync(args) => rsync_files(args),
|
||||
Command::Yarn(args) => setup_yarn(args),
|
||||
Command::Build(args) => run_build(args),
|
||||
Command::BuildArtifacts(args) => build_artifacts(args),
|
||||
Command::BuildBundleBinary => build_bundle_binary(),
|
||||
Command::BuildDistFolder(args) => build_dist_folder(args),
|
||||
Command::Archive(args) => archive_command(args)?,
|
||||
};
|
||||
Ok(())
|
||||
|
|
|
@ -16,8 +16,3 @@ pub fn absolute_msys_path(path: &Utf8Path) -> String {
|
|||
// and \ -> /
|
||||
format!("/{drive}/{}", path[7..].replace('\\', "/"))
|
||||
}
|
||||
|
||||
/// Converts backslashes to forward slashes
|
||||
pub fn unix_path(path: &Utf8Path) -> String {
|
||||
path.as_str().replace('\\', "/")
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
|
||||
use camino::Utf8Path;
|
||||
|
@ -10,12 +11,10 @@ use crate::run::run_command;
|
|||
|
||||
#[derive(Args)]
|
||||
pub struct PyenvArgs {
|
||||
python_bin: String,
|
||||
uv_bin: String,
|
||||
pyenv_folder: String,
|
||||
initial_reqs: String,
|
||||
reqs: Vec<String>,
|
||||
#[arg(long, allow_hyphen_values(true))]
|
||||
venv_args: Vec<String>,
|
||||
#[arg(trailing_var_arg = true)]
|
||||
extra_args: Vec<String>,
|
||||
}
|
||||
|
||||
/// Set up a venv if one doesn't already exist, and then sync packages with
|
||||
|
@ -23,35 +22,23 @@ pub struct PyenvArgs {
|
|||
pub fn setup_pyenv(args: PyenvArgs) {
|
||||
let pyenv_folder = Utf8Path::new(&args.pyenv_folder);
|
||||
|
||||
let pyenv_bin_folder = pyenv_folder.join(if cfg!(windows) { "scripts" } else { "bin" });
|
||||
let pyenv_python = pyenv_bin_folder.join("python");
|
||||
let pip_sync = pyenv_bin_folder.join("pip-sync");
|
||||
|
||||
if !pyenv_python.exists() {
|
||||
run_command(
|
||||
Command::new(&args.python_bin)
|
||||
.args(["-m", "venv"])
|
||||
.args(args.venv_args)
|
||||
.arg(pyenv_folder),
|
||||
);
|
||||
|
||||
if cfg!(windows) {
|
||||
// the first install on Windows throws an error the first time pip is upgraded,
|
||||
// so we install it twice and swallow the first error
|
||||
let _output = Command::new(&pyenv_python)
|
||||
.args(["-m", "pip", "install", "-r", &args.initial_reqs])
|
||||
.output()
|
||||
.unwrap();
|
||||
// On first run, ninja creates an empty bin/ folder which breaks the initial
|
||||
// install. But we don't want to indiscriminately remove the folder, or
|
||||
// macOS Gatekeeper needs to rescan the files each time.
|
||||
if pyenv_folder.exists() {
|
||||
let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
|
||||
if !cache_tag.exists() {
|
||||
fs::remove_dir_all(pyenv_folder).expect("Failed to remove existing pyenv folder");
|
||||
}
|
||||
|
||||
run_command(Command::new(pyenv_python).args([
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-r",
|
||||
&args.initial_reqs,
|
||||
]));
|
||||
}
|
||||
|
||||
run_command(Command::new(pip_sync).args(&args.reqs));
|
||||
run_command(
|
||||
Command::new(args.uv_bin)
|
||||
.env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
|
||||
.args(["sync", "--frozen"])
|
||||
.args(args.extra_args),
|
||||
);
|
||||
|
||||
// Write empty stamp file
|
||||
fs::write(pyenv_folder.join(".stamp"), "").expect("Failed to write stamp file");
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::io::ErrorKind;
|
||||
use std::process::Command;
|
||||
|
||||
use anki_io::create_dir_all;
|
||||
|
@ -44,7 +43,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
|
|||
if let Some((k, v)) = s.split_once('=') {
|
||||
Ok((k.into(), v.into()))
|
||||
} else {
|
||||
Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
|
||||
Err(std::io::Error::other("invalid env var"))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -190,13 +190,10 @@ in the collection2.log file will also be printed on stdout.
|
|||
|
||||
If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit.
|
||||
|
||||
# Binary Bundles
|
||||
# Installer/launcher
|
||||
|
||||
Anki's official binary packages are created with `./ninja bundle`. The bundling
|
||||
process was created specifically for the official builds, and is provided as-is;
|
||||
we are unfortunately not able to provide assistance with any issues you may run
|
||||
into when using it. You'll need to run
|
||||
`git submodule update --checkout qt/bundle/PyOxidizer` first.
|
||||
- The anki-release package is created/published with the scripts in qt/release.
|
||||
- The installer/launcher is created with the build scripts in qt/launcher/{platform}.
|
||||
|
||||
## Mixing development and study
|
||||
|
||||
|
|
|
@ -51,13 +51,8 @@ Anki requires a recent glibc.
|
|||
|
||||
If you are using a distro that uses musl, Anki will not work.
|
||||
|
||||
If your glibc version is 2.35+ on AMD64 or 2.39+ on ARM64, you can skip the rest of this section.
|
||||
|
||||
If your system has an older glibc, you won't be able to use the PyQt wheels that are
|
||||
available in pip/PyPy, and will need to use your system-installed PyQt instead.
|
||||
Your distro will also need to have Python 3.9 or later.
|
||||
|
||||
After installing the system libraries (eg:
|
||||
You can use your system's Qt libraries if they are Qt 6.2 or later, if
|
||||
you wish. After installing the system libraries (eg:
|
||||
'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv pyqt6-dev-tools'),
|
||||
find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll
|
||||
also need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where
|
||||
|
@ -68,12 +63,6 @@ export PYTHONPATH=/usr/lib/python3/dist-packages
|
|||
export PYTHON_BINARY=/usr/bin/python3
|
||||
```
|
||||
|
||||
There are a few things to be aware of:
|
||||
|
||||
- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5.
|
||||
- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work
|
||||
on Qt6 environments.
|
||||
|
||||
## Packaging considerations
|
||||
|
||||
Python, node and protoc are downloaded as part of the build. You can optionally define
|
||||
|
|
|
@ -9,7 +9,12 @@ You must be running 64 bit Windows 10, version 1703 or newer.
|
|||
**Rustup**:
|
||||
|
||||
As mentioned in development.md, rustup must be installed. If you're on
|
||||
ARM Windows, you must set the default target to x86_64-pc-windows-msvc.
|
||||
ARM Windows and install the ARM64 version of rust-up, from this project folder,
|
||||
run
|
||||
|
||||
```
|
||||
rustup target add x86_64-pc-windows-msvc
|
||||
```
|
||||
|
||||
**Visual Studio**:
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit ca04132a8f82296f3e0ea22b74bb4221e1d11d3f
|
||||
Subproject commit 2f8c9d9566aef8b86e3326fe9ff007d594b7ec83
|
|
@ -307,16 +307,17 @@ deck-config-new-interval-tooltip = The multiplier applied to a review interval w
|
|||
deck-config-minimum-interval-tooltip = The minimum interval given to a review card after answering `Again`.
|
||||
deck-config-custom-scheduling = Custom scheduling
|
||||
deck-config-custom-scheduling-tooltip = Affects the entire collection. Use at your own risk!
|
||||
# Easy Days section
|
||||
|
||||
## Easy Days section.
|
||||
|
||||
deck-config-easy-days-title = Easy Days
|
||||
deck-config-easy-days-monday = Monday
|
||||
deck-config-easy-days-tuesday = Tuesday
|
||||
deck-config-easy-days-wednesday = Wednesday
|
||||
deck-config-easy-days-thursday = Thursday
|
||||
deck-config-easy-days-friday = Friday
|
||||
deck-config-easy-days-saturday = Saturday
|
||||
deck-config-easy-days-sunday = Sunday
|
||||
deck-config-easy-days-monday = Mon
|
||||
deck-config-easy-days-tuesday = Tue
|
||||
deck-config-easy-days-wednesday = Wed
|
||||
deck-config-easy-days-thursday = Thu
|
||||
deck-config-easy-days-friday = Fri
|
||||
deck-config-easy-days-saturday = Sat
|
||||
deck-config-easy-days-sunday = Sun
|
||||
deck-config-easy-days-normal = Normal
|
||||
deck-config-easy-days-reduced = Reduced
|
||||
deck-config-easy-days-minimum = Minimum
|
||||
|
@ -395,9 +396,11 @@ deck-config-weights = FSRS parameters
|
|||
deck-config-compute-optimal-weights = Optimize FSRS parameters
|
||||
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
|
||||
deck-config-optimize-button = Optimize Current Preset
|
||||
# Indicates that a given function or label, provided via the "text" variable, operates slowly.
|
||||
deck-config-slow-suffix = { $text } (slow)
|
||||
deck-config-compute-button = Compute
|
||||
deck-config-ignore-before = Ignore cards reviewed before
|
||||
deck-config-time-to-optimize = It's been a while - using the Optimize All button is recommended.
|
||||
deck-config-time-to-optimize = It's been a while - using the Optimize All Presets button is recommended.
|
||||
deck-config-evaluate-button = Evaluate
|
||||
deck-config-desired-retention = Desired retention
|
||||
deck-config-historical-retention = Historical retention
|
||||
|
@ -482,9 +485,12 @@ deck-config-percent-of-reviews =
|
|||
*[other] { $pct }% of { $reviews } reviews
|
||||
}
|
||||
deck-config-percent-input = { $pct }%
|
||||
# This message appears during FSRS parameter optimization.
|
||||
deck-config-checking-for-improvement = Checking for improvement...
|
||||
deck-config-optimizing-preset = Optimizing preset { $current_count }/{ $total_count }...
|
||||
deck-config-fsrs-must-be-enabled = FSRS must be enabled first.
|
||||
deck-config-fsrs-params-optimal = The FSRS parameters currently appear to be optimal.
|
||||
|
||||
deck-config-fsrs-params-no-reviews = No reviews found. Make sure this preset is assigned to all decks (including subdecks) that you want to optimize, and try again.
|
||||
|
||||
deck-config-wait-for-audio = Wait for audio
|
||||
|
@ -511,6 +517,23 @@ deck-config-save-options-to-preset = Save Changes to Preset
|
|||
# specific date.
|
||||
deck-config-fsrs-simulator-radio-memorized = Memorized
|
||||
|
||||
## Messages related to the FSRS scheduler’s health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.
|
||||
|
||||
# Checkbox
|
||||
deck-config-health-check = Check health when optimizing
|
||||
# Message box showing the result of the health check
|
||||
deck-config-fsrs-bad-fit-warning = Health Check:
|
||||
Your memory is difficult for FSRS to predict. Recommendations:
|
||||
|
||||
- Suspend or reformulate leeches.
|
||||
- Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
|
||||
- Understand before you memorize.
|
||||
|
||||
If you follow these suggestions, performance will usually improve over the next few months.
|
||||
# Message box showing the result of the health check
|
||||
deck-config-fsrs-good-fit = Health Check:
|
||||
FSRS can adapt to your memory well.
|
||||
|
||||
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
|
||||
|
||||
deck-config-a-100-day-interval =
|
||||
|
@ -550,6 +573,8 @@ deck-config-compute-optimal-retention-tooltip =
|
|||
if it significantly differs from 0.9, it's a sign that the time you've allocated each day is either too low
|
||||
or too high for the amount of cards you're trying to learn. This number can be useful as a reference, but it
|
||||
is not recommended to copy it into the desired retention field.
|
||||
deck-config-health-check-tooltip1 = This will show a warning if FSRS struggles to adapt to your memory.
|
||||
deck-config-health-check-tooltip2 = Health check is performed only when using Optimize Current Preset.
|
||||
|
||||
deck-config-compute-optimal-retention = Compute minimum recommended retention
|
||||
deck-config-predicted-optimal-retention = Minimum recommended retention: { $num }
|
||||
|
|
|
@ -96,6 +96,7 @@ editing-image-occlusion-rectangle-tool = Rectangle
|
|||
editing-image-occlusion-ellipse-tool = Ellipse
|
||||
editing-image-occlusion-polygon-tool = Polygon
|
||||
editing-image-occlusion-text-tool = Text
|
||||
editing-image-occlusion-fill-tool = Fill with colour
|
||||
editing-image-occlusion-toggle-mask-editor = Toggle Mask Editor
|
||||
editing-image-occlusion-reset = Reset Image Occlusion
|
||||
editing-image-occlusion-confirm-reset = Are you sure you want to reset this image occlusion?
|
||||
|
|
|
@ -1,36 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright: Ankitects Pty Ltd and contributors
|
||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
"""
|
||||
Tool to extract core strings and keys from .ftl files.
|
||||
"""
|
||||
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
|
||||
from fluent.syntax import parse
|
||||
from fluent.syntax.ast import Junk, Message
|
||||
from fluent.syntax.serializer import serialize_element
|
||||
|
||||
root = ".."
|
||||
ftl_files = glob.glob(os.path.join(root, "ftl", "core", "*.ftl"), recursive=True)
|
||||
keys_by_value: dict[str, list[str]] = {}
|
||||
|
||||
for path in ftl_files:
|
||||
obj = parse(open(path, encoding="utf8").read(), with_spans=False)
|
||||
for ent in obj.body:
|
||||
if isinstance(ent, Junk):
|
||||
raise Exception(f"file had junk! {path} {ent}")
|
||||
if isinstance(ent, Message):
|
||||
key = ent.id.name
|
||||
val = "".join(serialize_element(elem) for elem in ent.value.elements)
|
||||
if val in keys_by_value:
|
||||
print("duplicate found:", keys_by_value[val], key)
|
||||
keys_by_value.setdefault(val, []).append(key)
|
||||
|
||||
json.dump(
|
||||
keys_by_value, open(os.path.join(root, "keys_by_value.json"), "w", encoding="utf8")
|
||||
)
|
||||
print("keys:", len(keys_by_value))
|
|
@ -1,99 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright: Ankitects Pty Ltd and contributors
|
||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
"""
|
||||
Parse and re-serialize ftl files to get them in a consistent form.
|
||||
"""
|
||||
|
||||
import difflib
|
||||
import glob
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
from compare_locales import parser
|
||||
from compare_locales.checks.fluent import ReferenceMessageVisitor
|
||||
from compare_locales.paths import File
|
||||
from fluent.syntax import parse, serialize
|
||||
from fluent.syntax.ast import Junk
|
||||
|
||||
|
||||
def check_missing_terms(path: str) -> bool:
|
||||
"True if file is ok."
|
||||
file = File(path, os.path.basename(path))
|
||||
content = open(path, "rb").read()
|
||||
p = parser.getParser(file.file)
|
||||
p.readContents(content)
|
||||
refList = p.parse()
|
||||
|
||||
p.readContents(content)
|
||||
for e in p.parse():
|
||||
ref_data = ReferenceMessageVisitor()
|
||||
ref_data.visit(e.entry)
|
||||
|
||||
for attr_or_val, refs in ref_data.entry_refs.items():
|
||||
for ref, ref_type in refs.items():
|
||||
if ref not in refList:
|
||||
print(f"In {path}:{e}, missing '{ref}'")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def check_file(path: str, fix: bool) -> bool:
|
||||
"True if file is ok."
|
||||
orig_text = open(path, encoding="utf8").read()
|
||||
obj = parse(orig_text, with_spans=False)
|
||||
# make sure there's no junk
|
||||
for ent in obj.body:
|
||||
if isinstance(ent, Junk):
|
||||
raise Exception(f"file had junk! {path} {ent}")
|
||||
# serialize
|
||||
new_text = serialize(obj)
|
||||
# make sure serializing did not introduce new junk
|
||||
obj = parse(new_text, with_spans=False)
|
||||
for ent in obj.body:
|
||||
if isinstance(ent, Junk):
|
||||
raise Exception(f"file introduced junk! {path} {ent}")
|
||||
|
||||
if new_text == orig_text:
|
||||
return check_missing_terms(path)
|
||||
|
||||
if fix:
|
||||
print(f"Fixing {path}")
|
||||
open(path, "w", newline="\n", encoding="utf8").write(new_text)
|
||||
return True
|
||||
else:
|
||||
print(f"Bad formatting in {path}")
|
||||
print(
|
||||
"\n".join(
|
||||
difflib.unified_diff(
|
||||
orig_text.splitlines(),
|
||||
new_text.splitlines(),
|
||||
fromfile="bad",
|
||||
tofile="good",
|
||||
lineterm="",
|
||||
)
|
||||
)
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def check_files(files: List[str], fix: bool) -> bool:
|
||||
"True if files ok."
|
||||
|
||||
found_bad = False
|
||||
for path in files:
|
||||
ok = check_file(path, fix)
|
||||
if not ok:
|
||||
found_bad = True
|
||||
return not found_bad
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
template_root = os.environ["BUILD_WORKSPACE_DIRECTORY"]
|
||||
template_files = glob.glob(
|
||||
os.path.join(template_root, "ftl", "*", "*.ftl"), recursive=True
|
||||
)
|
||||
|
||||
check_files(template_files, fix=True)
|
|
@ -1,14 +0,0 @@
|
|||
# Copyright: Ankitects Pty Ltd and contributors
|
||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
import format
|
||||
|
||||
template_root = os.path.dirname(sys.argv[1])
|
||||
template_files = glob.glob(os.path.join(template_root, "*", "*.ftl"), recursive=True)
|
||||
|
||||
if not format.check_files(template_files, fix=False):
|
||||
sys.exit(1)
|
|
@ -1 +1 @@
|
|||
Subproject commit f35acabb46dc9197a62c47eb7f2ca062628b1d94
|
||||
Subproject commit 69f2dbaeba6f72ac62da0b35881f320603da5124
|
|
@ -73,6 +73,7 @@ qt-misc-second =
|
|||
qt-misc-layout-auto-enabled = Responsive layout enabled
|
||||
qt-misc-layout-vertical-enabled = Vertical layout enabled
|
||||
qt-misc-layout-horizontal-enabled = Horizontal layout enabled
|
||||
qt-misc-please-restart-to-update-anki = Please restart Anki to update to the latest version.
|
||||
|
||||
## deprecated- these strings will be removed in the future, and do not need
|
||||
## to be translated
|
||||
|
|
2
ninja
2
ninja
|
@ -8,7 +8,7 @@ else
|
|||
out="$BUILD_ROOT"
|
||||
fi
|
||||
export CARGO_TARGET_DIR=$out/rust
|
||||
export RECONFIGURE_KEY="${MAC_X86};${SOURCEMAP};${HMR}"
|
||||
export RECONFIGURE_KEY="${MAC_X86};${LIN_ARM64};${SOURCEMAP};${HMR}"
|
||||
|
||||
if [ "$SKIP_RUNNER_BUILD" = "1" ]; then
|
||||
echo "Runner not rebuilt."
|
||||
|
|
|
@ -235,6 +235,7 @@ message DeckConfigsForUpdate {
|
|||
// only applies to v3 scheduler
|
||||
bool new_cards_ignore_review_limit = 7;
|
||||
bool fsrs = 8;
|
||||
bool fsrs_health_check = 11;
|
||||
bool apply_all_parent_limits = 9;
|
||||
uint32 days_since_last_fsrs_optimize = 10;
|
||||
}
|
||||
|
@ -258,4 +259,5 @@ message UpdateDeckConfigsRequest {
|
|||
bool fsrs = 8;
|
||||
bool apply_all_parent_limits = 9;
|
||||
bool fsrs_reschedule = 10;
|
||||
bool fsrs_health_check = 11;
|
||||
}
|
||||
|
|
|
@ -354,11 +354,13 @@ message ComputeFsrsParamsRequest {
|
|||
repeated float current_params = 2;
|
||||
int64 ignore_revlogs_before_ms = 3;
|
||||
uint32 num_of_relearning_steps = 4;
|
||||
bool health_check = 5;
|
||||
}
|
||||
|
||||
message ComputeFsrsParamsResponse {
|
||||
repeated float params = 1;
|
||||
uint32 fsrs_items = 2;
|
||||
optional bool health_check_passed = 3;
|
||||
}
|
||||
|
||||
message ComputeFsrsParamsFromItemsRequest {
|
||||
|
@ -435,9 +437,9 @@ message GetOptimalRetentionParametersResponse {
|
|||
}
|
||||
|
||||
message EvaluateParamsRequest {
|
||||
repeated float params = 1;
|
||||
string search = 2;
|
||||
int64 ignore_revlogs_before_ms = 3;
|
||||
string search = 1;
|
||||
int64 ignore_revlogs_before_ms = 2;
|
||||
uint32 num_of_relearning_steps = 3;
|
||||
}
|
||||
|
||||
message EvaluateParamsResponse {
|
||||
|
@ -448,6 +450,7 @@ message EvaluateParamsResponse {
|
|||
message ComputeMemoryStateResponse {
|
||||
optional cards.FsrsMemoryState state = 1;
|
||||
float desired_retention = 2;
|
||||
float decay = 3;
|
||||
}
|
||||
|
||||
message FuzzDeltaRequest {
|
||||
|
|
|
@ -122,6 +122,7 @@ class ComputedMemoryState:
|
|||
desired_retention: float
|
||||
stability: float | None = None
|
||||
difficulty: float | None = None
|
||||
decay: float | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
|
@ -1189,9 +1190,13 @@ class Collection(DeprecatedNamesMixin):
|
|||
desired_retention=resp.desired_retention,
|
||||
stability=resp.state.stability,
|
||||
difficulty=resp.state.difficulty,
|
||||
decay=resp.decay,
|
||||
)
|
||||
else:
|
||||
return ComputedMemoryState(desired_retention=resp.desired_retention)
|
||||
return ComputedMemoryState(
|
||||
desired_retention=resp.desired_retention,
|
||||
decay=resp.decay,
|
||||
)
|
||||
|
||||
def fuzz_delta(self, card_id: CardId, interval: int) -> int:
|
||||
"The delta days of fuzz applied if reviewing the card in v3."
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
# Copyright: Ankitects Pty Ltd and contributors
|
||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
"""Helpers for serializing third-party collections to a common JSON form.
|
||||
"""
|
||||
"""Helpers for serializing third-party collections to a common JSON form."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
|
|
@ -167,9 +167,9 @@ class NoteImporter(Importer):
|
|||
firsts[fld0] = True
|
||||
# already exists?
|
||||
found = False
|
||||
if csum in csums:
|
||||
if csum in csums: # type: ignore[comparison-overlap]
|
||||
# csum is not a guarantee; have to check
|
||||
for id in csums[csum]:
|
||||
for id in csums[csum]: # type: ignore[index]
|
||||
flds = self.col.db.scalar("select flds from notes where id = ?", id)
|
||||
sflds = split_fields(flds)
|
||||
if fld0 == sflds[0]:
|
||||
|
|
|
@ -198,7 +198,9 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
|
|||
# getdefaultlocale() is deprecated since Python 3.11, but we need to keep using it as getlocale() behaves differently: https://bugs.python.org/issue38805
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
(sys_lang, enc) = locale.getdefaultlocale()
|
||||
(sys_lang, enc) = (
|
||||
locale.getdefaultlocale() # pylint: disable=deprecated-method
|
||||
)
|
||||
except AttributeError:
|
||||
# this will return a different format on Windows (e.g. Italian_Italy), resulting in us falling back to en_US
|
||||
# further below
|
||||
|
|
|
@ -76,7 +76,7 @@ class MediaManager(DeprecatedNamesMixin):
|
|||
return self.col._backend.strip_av_tags(text)
|
||||
|
||||
def _extract_filenames(self, text: str) -> list[str]:
|
||||
"This only exists do support a legacy function; do not use."
|
||||
"This only exists to support a legacy function; do not use."
|
||||
out = self.col._backend.extract_av_tags(text=text, question_side=True)
|
||||
return [
|
||||
x.filename
|
||||
|
|
|
@ -42,6 +42,7 @@ from anki.utils import ids2str, int_time
|
|||
|
||||
class SchedulerBase(DeprecatedNamesMixin):
|
||||
"Actions shared between schedulers."
|
||||
|
||||
version = 0
|
||||
|
||||
def __init__(self, col: anki.collection.Collection) -> None:
|
||||
|
|
|
@ -9,10 +9,14 @@ These can be accessed via eg card.question_av_tags()
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
from anki import hooks
|
||||
|
||||
|
||||
@dataclass
|
||||
class TTSTag:
|
||||
|
@ -34,10 +38,30 @@ class SoundOrVideoTag:
|
|||
"""Contains the filename inside a [sound:...] tag.
|
||||
|
||||
Video files also use [sound:...].
|
||||
|
||||
SECURITY: We should only ever construct this with basename(filename),
|
||||
as passing arbitrary paths to mpv from a shared deck is a security issue.
|
||||
|
||||
Anki add-ons can supply an absolute file path to play any file on disk
|
||||
using the built-in media player.
|
||||
"""
|
||||
|
||||
filename: str
|
||||
|
||||
def path(self, media_folder: str) -> str:
|
||||
"Prepend the media folder to the filename."
|
||||
if os.path.basename(self.filename) == self.filename:
|
||||
# Path in the current collection's media folder.
|
||||
# Turn it into a fully-qualified path so mpv can find it, and to
|
||||
# ensure the filename doesn't get treated like a non-file scheme.
|
||||
head, tail = media_folder, self.filename
|
||||
else:
|
||||
# Add-ons can use absolute paths to play arbitrary files on disk.
|
||||
# Example: sound.av_player.play_tags([SoundOrVideoTag("/path/to/file")])
|
||||
head, tail = os.path.split(os.path.abspath(self.filename))
|
||||
tail = hooks.media_file_filter(tail)
|
||||
return os.path.join(head, tail)
|
||||
|
||||
|
||||
# note this does not include image tags, which are handled with HTML.
|
||||
AVTag = Union[SoundOrVideoTag, TTSTag]
|
||||
|
|
|
@ -174,7 +174,7 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """
|
|||
cards=cards, seconds=float(thetime)
|
||||
)
|
||||
# again/pass count
|
||||
b += "<br>" + "Again count: %s" % bold(failed)
|
||||
b += "<br>" + "Again count: %s" % bold(str(failed))
|
||||
if cards:
|
||||
b += " " + "(%s correct)" % bold(
|
||||
"%0.1f%%" % ((1 - failed / float(cards)) * 100)
|
||||
|
@ -182,7 +182,10 @@ from revlog where type != {REVLOG_RESCHED} and id > ? """
|
|||
# type breakdown
|
||||
b += "<br>"
|
||||
b += "Learn: %(a)s, Review: %(b)s, Relearn: %(c)s, Filtered: %(d)s" % dict(
|
||||
a=bold(lrn), b=bold(rev), c=bold(relrn), d=bold(filt)
|
||||
a=bold(str(lrn)),
|
||||
b=bold(str(rev)),
|
||||
c=bold(str(relrn)),
|
||||
d=bold(str(filt)),
|
||||
)
|
||||
# mature today
|
||||
mcnt, msum = self.col.db.first(
|
||||
|
|
|
@ -28,6 +28,7 @@ template_legacy.py file, using the legacy addHook() system.
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import os.path
|
||||
from collections.abc import Sequence
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Union
|
||||
|
@ -95,7 +96,7 @@ class PartiallyRenderedCard:
|
|||
def av_tag_to_native(tag: card_rendering_pb2.AVTag) -> AVTag:
|
||||
val = tag.WhichOneof("value")
|
||||
if val == "sound_or_video":
|
||||
return SoundOrVideoTag(filename=tag.sound_or_video)
|
||||
return SoundOrVideoTag(filename=os.path.basename(tag.sound_or_video))
|
||||
else:
|
||||
return TTSTag(
|
||||
field_text=tag.tts.field_text,
|
||||
|
@ -278,6 +279,7 @@ class TemplateRenderContext:
|
|||
@dataclass
|
||||
class TemplateRenderOutput:
|
||||
"Stores the rendered templates and extracted AV tags."
|
||||
|
||||
question_text: str
|
||||
answer_text: str
|
||||
question_av_tags: list[AVTag]
|
||||
|
|
|
@ -244,8 +244,8 @@ def call(argv: list[str], wait: bool = True, **kwargs: Any) -> int:
|
|||
# OS helpers
|
||||
##############################################################################
|
||||
|
||||
is_mac = sys.platform.startswith("darwin")
|
||||
is_win = sys.platform.startswith("win32")
|
||||
is_mac = sys.platform == "darwin"
|
||||
is_win = sys.platform == "win32"
|
||||
# also covers *BSD
|
||||
is_lin = not is_mac and not is_win
|
||||
is_gnome = (
|
||||
|
@ -309,12 +309,17 @@ def int_version() -> int:
|
|||
"""Anki's version as an integer in the form YYMMPP, e.g. 230900.
|
||||
(year, month, patch).
|
||||
In 2.1.x releases, this was just the last number."""
|
||||
import re
|
||||
|
||||
from anki.buildinfo import version
|
||||
|
||||
# Strip non-numeric characters (handles beta/rc suffixes like '25.02b1' or 'rc3')
|
||||
numeric_version = re.sub(r"[^0-9.]", "", version)
|
||||
|
||||
try:
|
||||
[year, month, patch] = version.split(".")
|
||||
[year, month, patch] = numeric_version.split(".")
|
||||
except ValueError:
|
||||
[year, month] = version.split(".")
|
||||
[year, month] = numeric_version.split(".")
|
||||
patch = "0"
|
||||
|
||||
year_num = int(year)
|
||||
|
|
42
pylib/hatch_build.py
Normal file
42
pylib/hatch_build.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
# Copyright: Ankitects Pty Ltd and contributors
|
||||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
from hatchling.builders.hooks.plugin.interface import BuildHookInterface
|
||||
|
||||
|
||||
class CustomBuildHook(BuildHookInterface):
|
||||
"""Build hook to include compiled rsbridge from out/pylib."""
|
||||
|
||||
PLUGIN_NAME = "custom"
|
||||
|
||||
def initialize(self, version: str, build_data: Dict[str, Any]) -> None:
|
||||
"""Initialize the build hook."""
|
||||
force_include = build_data.setdefault("force_include", {})
|
||||
|
||||
# Set platform-specific wheel tag
|
||||
if not (platform_tag := os.environ.get("ANKI_WHEEL_TAG")):
|
||||
# On Windows, uv invokes this build hook during the initial uv sync,
|
||||
# when the tag has not been declared by our build script.
|
||||
return
|
||||
build_data.setdefault("tag", platform_tag)
|
||||
|
||||
# Mark as non-pure Python since we include compiled extension
|
||||
build_data["pure_python"] = False
|
||||
|
||||
# Look for generated files in out/pylib/anki
|
||||
project_root = Path(self.root).parent
|
||||
generated_root = project_root / "out" / "pylib" / "anki"
|
||||
|
||||
assert generated_root.exists(), "you should build with --wheel"
|
||||
for path in generated_root.rglob("*"):
|
||||
if path.is_file():
|
||||
relative_path = path.relative_to(generated_root)
|
||||
# Place files under anki/ in the distribution
|
||||
dist_path = "anki" / relative_path
|
||||
force_include[str(path)] = str(dist_path)
|
34
pylib/pyproject.toml
Normal file
34
pylib/pyproject.toml
Normal file
|
@ -0,0 +1,34 @@
|
|||
[project]
|
||||
name = "anki"
|
||||
dynamic = ["version"]
|
||||
requires-python = ">=3.9"
|
||||
license = "AGPL-3.0-or-later"
|
||||
dependencies = [
|
||||
"beautifulsoup4",
|
||||
"decorator",
|
||||
"markdown",
|
||||
"orjson",
|
||||
"protobuf>=4.21",
|
||||
"requests[socks]",
|
||||
"typing_extensions",
|
||||
"types-protobuf",
|
||||
"types-requests",
|
||||
"types-orjson",
|
||||
# platform-specific dependencies
|
||||
"distro; sys_platform != 'darwin' and sys_platform != 'win32'",
|
||||
"psutil; sys_platform == 'win32'",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["anki"]
|
||||
|
||||
[tool.hatch.version]
|
||||
source = "code"
|
||||
path = "../python/version.py"
|
||||
|
||||
[tool.hatch.build.hooks.custom]
|
||||
path = "hatch_build.py"
|
|
@ -12,6 +12,7 @@ description = "Anki's Rust library code Python bindings"
|
|||
name = "rsbridge"
|
||||
crate-type = ["cdylib"]
|
||||
path = "lib.rs"
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
anki.workspace = true
|
||||
|
|
|
@ -1,21 +1,33 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
// macOS needs special link flags for PyO3
|
||||
if cfg!(target_os = "macos") {
|
||||
println!("cargo:rustc-link-arg=-undefined");
|
||||
println!("cargo:rustc-link-arg=dynamic_lookup");
|
||||
println!("cargo:rustc-link-arg=-mmacosx-version-min=10.13");
|
||||
println!("cargo:rustc-link-arg=-mmacosx-version-min=11");
|
||||
}
|
||||
|
||||
// On Windows, we need to be able to link with python3.lib
|
||||
if cfg!(windows) {
|
||||
let lib_path = Path::new("../../out/extracted/python/libs")
|
||||
.canonicalize()
|
||||
.expect("libs");
|
||||
println!("cargo:rustc-link-search={}", lib_path.display());
|
||||
use std::process::Command;
|
||||
|
||||
// Run Python to get sysconfig paths
|
||||
let output = Command::new("../../out/pyenv/scripts/python")
|
||||
.args([
|
||||
"-c",
|
||||
"import sysconfig; print(sysconfig.get_paths()['stdlib'])",
|
||||
])
|
||||
.output()
|
||||
.expect("Failed to execute Python");
|
||||
|
||||
let stdlib_path = String::from_utf8(output.stdout)
|
||||
.expect("Failed to parse Python output")
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let libs_path = stdlib_path + "s";
|
||||
println!("cargo:rustc-link-search={}", libs_path);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,36 @@
|
|||
[tool.black]
|
||||
target-version = ["py39", "py310", "py311", "py312"]
|
||||
extend-exclude = "qt/bundle"
|
||||
[project]
|
||||
name = "anki-dev"
|
||||
version = "0.0.0"
|
||||
description = "Local-only environment"
|
||||
requires-python = ">=3.9"
|
||||
classifiers = ["Private :: Do Not Upload"]
|
||||
|
||||
[tool.pyright]
|
||||
include = ["pylib/anki", "qt/aqt"]
|
||||
stubPath = ""
|
||||
pythonVersion = "3.9"
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"black",
|
||||
"isort",
|
||||
"mypy",
|
||||
"mypy-protobuf",
|
||||
"pylint",
|
||||
"pytest",
|
||||
"PyChromeDevTools",
|
||||
"colorama", # for isort --color
|
||||
"wheel",
|
||||
"hatchling", # for type checking hatch_build.py files
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
sphinx = [
|
||||
"sphinx",
|
||||
"sphinx_rtd_theme",
|
||||
"sphinx-autoapi",
|
||||
]
|
||||
|
||||
[tool.uv.workspace]
|
||||
members = ["pylib", "qt"]
|
||||
|
||||
[[tool.uv.index]]
|
||||
name = "testpypi"
|
||||
url = "https://test.pypi.org/simple/"
|
||||
publish-url = "https://test.pypi.org/legacy/"
|
||||
explicit = true
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
- To achieve reproducible builds we use pip-tools to lock packages to a particular version - see
|
||||
update_python_deps.sh
|
||||
- write_wheel.py is used to generate our wheels.
|
|
@ -1,152 +0,0 @@
|
|||
[
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "Flask",
|
||||
"Version": "1.1.2"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "Flask-Cors",
|
||||
"Version": "3.0.9"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "Jinja2",
|
||||
"Version": "2.11.2"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "Markdown",
|
||||
"Version": "3.3.3"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "MarkupSafe",
|
||||
"Version": "1.1.1"
|
||||
},
|
||||
{
|
||||
"License": "GPL v3",
|
||||
"Name": "PyQt5",
|
||||
"Version": "5.15.1"
|
||||
},
|
||||
{
|
||||
"License": "SIP",
|
||||
"Name": "PyQt5-sip",
|
||||
"Version": "12.8.1"
|
||||
},
|
||||
{
|
||||
"License": "GPL v3",
|
||||
"Name": "PyQtWebEngine",
|
||||
"Version": "5.15.1"
|
||||
},
|
||||
{
|
||||
"License": "BSD",
|
||||
"Name": "PySocks",
|
||||
"Version": "1.7.1"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "Send2Trash",
|
||||
"Version": "1.5.0"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "Werkzeug",
|
||||
"Version": "1.0.1"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "attrs",
|
||||
"Version": "20.3.0"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "beautifulsoup4",
|
||||
"Version": "4.9.3"
|
||||
},
|
||||
{
|
||||
"License": "Mozilla Public License 2.0 (MPL 2.0)",
|
||||
"Name": "certifi",
|
||||
"Version": "2020.11.8"
|
||||
},
|
||||
{
|
||||
"License": "GNU Library or Lesser General Public License (LGPL)",
|
||||
"Name": "chardet",
|
||||
"Version": "3.0.4"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "click",
|
||||
"Version": "7.1.2"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "decorator",
|
||||
"Version": "4.4.2"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "idna",
|
||||
"Version": "2.10"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "itsdangerous",
|
||||
"Version": "1.1.0"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "jsonschema",
|
||||
"Version": "3.2.0"
|
||||
},
|
||||
{
|
||||
"License": "Apache Software License, MIT License",
|
||||
"Name": "orjson",
|
||||
"Version": "3.4.3"
|
||||
},
|
||||
{
|
||||
"License": "3-Clause BSD License",
|
||||
"Name": "protobuf",
|
||||
"Version": "3.13.0"
|
||||
},
|
||||
{
|
||||
"License": "BSD License",
|
||||
"Name": "psutil",
|
||||
"Version": "5.7.3"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "pyrsistent",
|
||||
"Version": "0.17.3"
|
||||
},
|
||||
{
|
||||
"License": "Python Software Foundation License",
|
||||
"Name": "pywin32",
|
||||
"Version": "228"
|
||||
},
|
||||
{
|
||||
"License": "Apache Software License",
|
||||
"Name": "requests",
|
||||
"Version": "2.25.0"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "six",
|
||||
"Version": "1.15.0"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "soupsieve",
|
||||
"Version": "2.0.1"
|
||||
},
|
||||
{
|
||||
"License": "MIT License",
|
||||
"Name": "urllib3",
|
||||
"Version": "1.26.1"
|
||||
},
|
||||
{
|
||||
"License": "Zope Public License",
|
||||
"Name": "waitress",
|
||||
"Version": "1.4.4"
|
||||
}
|
||||
]
|
|
@ -1,23 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Install runtime requirements into a venv and extract their licenses.
|
||||
# As Windows currently uses extra deps, running this on Windows should
|
||||
# capture all packages.
|
||||
# Run with 'bash licenses.sh' to update 'license.json'
|
||||
|
||||
set -e
|
||||
|
||||
# setup venv
|
||||
python -m venv venv
|
||||
|
||||
# build wheels
|
||||
../bazel.bat --output_base=/c/bazel/anki/base build //pylib/anki:wheel //qt/aqt:wheel
|
||||
|
||||
# install wheels, bound to constrained versions
|
||||
venv/tools/pip install -c requirements.txt ../bazel-bin/pylib/anki/*.whl ../bazel-bin/qt/aqt/*.whl pip-licenses
|
||||
|
||||
# dump licenses - ptable is a pip-licenses dep
|
||||
venv/tools/pip-licenses --format=json --ignore-packages anki aqt pip-license PTable > licenses.json
|
||||
|
||||
# clean up
|
||||
rm -rf venv
|
|
@ -1,9 +0,0 @@
|
|||
beautifulsoup4
|
||||
decorator
|
||||
markdown
|
||||
orjson
|
||||
protobuf>=4.21
|
||||
requests[socks]
|
||||
distro; sys_platform != "darwin" and sys_platform != "win32"
|
||||
psutil; sys_platform == "win32"
|
||||
typing_extensions
|
|
@ -1,10 +0,0 @@
|
|||
beautifulsoup4
|
||||
flask
|
||||
flask_cors
|
||||
jsonschema
|
||||
requests
|
||||
send2trash
|
||||
waitress>=2.0.0
|
||||
psutil; sys.platform == "win32"
|
||||
pywin32; sys.platform == "win32"
|
||||
pip-system-certs
|
|
@ -1,2 +0,0 @@
|
|||
pip-tools
|
||||
colorama # required on windows
|
|
@ -1,54 +0,0 @@
|
|||
build==1.2.1 \
|
||||
--hash=sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d \
|
||||
--hash=sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4
|
||||
# via pip-tools
|
||||
click==8.1.7 \
|
||||
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
|
||||
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
|
||||
# via pip-tools
|
||||
colorama==0.4.6 \
|
||||
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
|
||||
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
|
||||
# via -r requirements.base.in
|
||||
importlib-metadata==8.4.0 \
|
||||
--hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \
|
||||
--hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5
|
||||
# via build
|
||||
packaging==24.1 \
|
||||
--hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
|
||||
--hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
|
||||
# via build
|
||||
pip-tools==7.4.1 \
|
||||
--hash=sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9 \
|
||||
--hash=sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9
|
||||
# via -r requirements.base.in
|
||||
pyproject-hooks==1.1.0 \
|
||||
--hash=sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965 \
|
||||
--hash=sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2
|
||||
# via
|
||||
# build
|
||||
# pip-tools
|
||||
tomli==2.0.1 \
|
||||
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
|
||||
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
|
||||
# via
|
||||
# build
|
||||
# pip-tools
|
||||
wheel==0.44.0 \
|
||||
--hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \
|
||||
--hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49
|
||||
# via pip-tools
|
||||
zipp==3.20.1 \
|
||||
--hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \
|
||||
--hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b
|
||||
# via importlib-metadata
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
pip==24.2 \
|
||||
--hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \
|
||||
--hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8
|
||||
# via pip-tools
|
||||
setuptools==74.1.1 \
|
||||
--hash=sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847 \
|
||||
--hash=sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766
|
||||
# via pip-tools
|
|
@ -1,8 +0,0 @@
|
|||
# currently broken in pyoxidizer
|
||||
jsonschema<4.2
|
||||
setuptools<70
|
||||
|
||||
-r requirements.base.in
|
||||
-r requirements.anki.in
|
||||
-r requirements.aqt.in
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
-r requirements.base.in
|
||||
-r requirements.anki.in
|
||||
-r requirements.aqt.in
|
||||
|
||||
black
|
||||
compare-locales
|
||||
isort
|
||||
mock
|
||||
mypy
|
||||
mypy-protobuf
|
||||
pip-tools
|
||||
pylint
|
||||
pytest
|
||||
PyChromeDevTools
|
||||
fluent.syntax
|
||||
types-decorator
|
||||
types-flask
|
||||
types-flask-cors
|
||||
types-markdown
|
||||
types-orjson
|
||||
types-protobuf
|
||||
types-requests
|
||||
types-waitress
|
||||
|
||||
# transitive windows dependencies
|
||||
atomicwrites
|
||||
colorama
|
|
@ -1,3 +0,0 @@
|
|||
pyqt5==5.14.1
|
||||
pyqtwebengine==5.14.0
|
||||
pyqt5_sip==12.8.1
|
|
@ -1,42 +0,0 @@
|
|||
pyqt5==5.14.1 \
|
||||
--hash=sha256:2d94ec761fb656707050c68b41958e3a9f755bb1df96c064470f4096d2899e32 \
|
||||
--hash=sha256:2f230f2dbd767099de7a0cb915abdf0cbc3256a0b5bb910eb09b99117db7a65b \
|
||||
--hash=sha256:31b142a868152d60c6323e0527edb692fdf05fd7cb4fe2fe9ce07d1ce560221a \
|
||||
--hash=sha256:713b9a201f5e7b2fca8691373e5d5c8c2552a51d87ca9ffbb1461e34e3241211 \
|
||||
--hash=sha256:a0bfe9fd718bca4de3e33000347e048f73126b6dc46530eb020b0251a638ee9d
|
||||
# via
|
||||
# -r requirements.in
|
||||
# pyqtwebengine
|
||||
pyqt5-sip==12.8.1 \
|
||||
--hash=sha256:0304ca9114b9817a270f67f421355075b78ff9fc25ac58ffd72c2601109d2194 \
|
||||
--hash=sha256:0cd969be528c27bbd4755bd323dff4a79a8fdda28215364e6ce3e069cb56c2a9 \
|
||||
--hash=sha256:2f35e82fd7ec1e1f6716e9154721c7594956a4f5bd4f826d8c6a6453833cc2f0 \
|
||||
--hash=sha256:30e944db9abee9cc757aea16906d4198129558533eb7fadbe48c5da2bd18e0bd \
|
||||
--hash=sha256:34dcd29be47553d5f016ff86e89e24cbc5eebae92eb2f96fb32d2d7ba028c43c \
|
||||
--hash=sha256:5a011aeff89660622a6d5c3388d55a9d76932f3b82c95e82fc31abd8b1d2990d \
|
||||
--hash=sha256:6c1ebee60f1d2b3c70aff866b7933d8d8d7646011f7c32f9321ee88c290aa4f9 \
|
||||
--hash=sha256:7b81382ce188d63890a0e35abe0f9bb946cabc873a31873b73583b0fc84ac115 \
|
||||
--hash=sha256:832fd60a264de4134c2824d393320838f3ab648180c9c357ec58a74524d24507 \
|
||||
--hash=sha256:84ba7746762bd223bed22428e8561aa267a229c28344c2d28c5d5d3f8970cffb \
|
||||
--hash=sha256:9312ec47cac4e33c11503bc1cbeeb0bdae619620472f38e2078c5a51020a930f \
|
||||
--hash=sha256:a1b8ef013086e224b8e86c93f880f776d01b59195bdfa2a8e0b23f0480678fec \
|
||||
--hash=sha256:a29e2ac399429d3b7738f73e9081e50783e61ac5d29344e0802d0dcd6056c5a2 \
|
||||
--hash=sha256:b6d42250baec52a5f77de64e2951d001c5501c3a2df2179f625b241cbaec3369 \
|
||||
--hash=sha256:bb5a87b66fc1445915104ee97f7a20a69decb42f52803e3b0795fa17ff88226c \
|
||||
--hash=sha256:c317ab1263e6417c498b81f5c970a9b1af7acefab1f80b4cc0f2f8e661f29fc5 \
|
||||
--hash=sha256:c9800729badcb247765e4ffe2241549d02da1fa435b9db224845bc37c3e99cb0 \
|
||||
--hash=sha256:c9d6d448c29dc6606bb7974696608f81f4316c8234f7c7216396ed110075e777 \
|
||||
--hash=sha256:da9c9f1e65b9d09e73bd75befc82961b6b61b5a3b9d0a7c832168e1415f163c6 \
|
||||
--hash=sha256:ed897c58acf4a3cdca61469daa31fe6e44c33c6c06a37c3f21fab31780b3b86a \
|
||||
--hash=sha256:f168f0a7f32b81bfeffdf003c36f25d81c97dee5eb67072a5183e761fe250f13
|
||||
# via
|
||||
# -r requirements.in
|
||||
# pyqt5
|
||||
# pyqtwebengine
|
||||
pyqtwebengine==5.14.0 \
|
||||
--hash=sha256:01cd7f38ba4efa5f4c0983219ab15dad7747a0ca9378c7832a3077a53988f5ea \
|
||||
--hash=sha256:37c4a820c5bcc82a6cb43ad33b8c81eee4c4772fc03e180a8fa37a59f99f6a48 \
|
||||
--hash=sha256:3d0cba04f64d4f66087cc92e254ff8b33ec4a4e6c7751417fe2bd53c3ed740a7 \
|
||||
--hash=sha256:85e1fac1b2c9bebf0b2e8cd9a75c14a38aad75165a8d8bcb8f6318944b779b25 \
|
||||
--hash=sha256:e11595051f8bfbfa49175d899b2c8c2eea3a3deac4141edf4db68c3555221c92
|
||||
# via -r requirements.in
|
|
@ -1,3 +0,0 @@
|
|||
pyqt5==5.15.5
|
||||
pyqtwebengine==5.15.5
|
||||
pyqt5_sip==12.9.0
|
|
@ -1,54 +0,0 @@
|
|||
pyqt5==5.15.5 \
|
||||
--hash=sha256:521130eea1eaac55cc6867b1dc627d292b6468fb8e525ce2a015cdf39028d6e8 \
|
||||
--hash=sha256:5966fb291f316f8e35bc8775dda63acf1bb9855baeb5af3e33d3e7c4f1cd98d4 \
|
||||
--hash=sha256:85e76b7a96995b9da12083850bf2a9f4f0aeba2b0b99461b3337ad7e44f428c3 \
|
||||
--hash=sha256:b411b7a8fa03901c9feb1dcbac7ea1fc3ce20b9ae682762b777cd5398749ca2b \
|
||||
--hash=sha256:b8e23c1a3fe1b7749c9106f36fba0bd4676dc77bcacca95304c6b840b782e24d
|
||||
# via
|
||||
# -r requirements.in
|
||||
# pyqtwebengine
|
||||
pyqt5-qt5==5.15.2 \
|
||||
--hash=sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a \
|
||||
--hash=sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962 \
|
||||
--hash=sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154 \
|
||||
--hash=sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327
|
||||
# via pyqt5
|
||||
pyqt5-sip==12.9.0 \
|
||||
--hash=sha256:055581c6fed44ba4302b70eeb82e979ff70400037358908f251cd85cbb3dbd93 \
|
||||
--hash=sha256:0fc9aefacf502696710b36cdc9fa2a61487f55ee883dbcf2c2a6477e261546f7 \
|
||||
--hash=sha256:42274a501ab4806d2c31659170db14c282b8313d2255458064666d9e70d96206 \
|
||||
--hash=sha256:4347bd81d30c8e3181e553b3734f91658cfbdd8f1a19f254777f906870974e6d \
|
||||
--hash=sha256:485972daff2fb0311013f471998f8ec8262ea381bded244f9d14edaad5f54271 \
|
||||
--hash=sha256:4f8e05fe01d54275877c59018d8e82dcdd0bc5696053a8b830eecea3ce806121 \
|
||||
--hash=sha256:69a3ad4259172e2b1aa9060de211efac39ddd734a517b1924d9c6c0cc4f55f96 \
|
||||
--hash=sha256:6a8701892a01a5a2a4720872361197cc80fdd5f49c8482d488ddf38c9c84f055 \
|
||||
--hash=sha256:6d5bca2fc222d58e8093ee8a81a6e3437067bb22bc3f86d06ec8be721e15e90a \
|
||||
--hash=sha256:83c3220b1ca36eb8623ba2eb3766637b19eb0ce9f42336ad8253656d32750c0a \
|
||||
--hash=sha256:a25b9843c7da6a1608f310879c38e6434331aab1dc2fe6cb65c14f1ecf33780e \
|
||||
--hash=sha256:ac57d796c78117eb39edd1d1d1aea90354651efac9d3590aac67fa4983f99f1f \
|
||||
--hash=sha256:b09f4cd36a4831229fb77c424d89635fa937d97765ec90685e2f257e56a2685a \
|
||||
--hash=sha256:c446971c360a0a1030282a69375a08c78e8a61d568bfd6dab3dcc5cf8817f644 \
|
||||
--hash=sha256:c5216403d4d8d857ec4a61f631d3945e44fa248aa2415e9ee9369ab7c8a4d0c7 \
|
||||
--hash=sha256:d3e4489d7c2b0ece9d203ae66e573939f7f60d4d29e089c9f11daa17cfeaae32 \
|
||||
--hash=sha256:d59af63120d1475b2bf94fe8062610720a9be1e8940ea146c7f42bb449d49067 \
|
||||
--hash=sha256:d85002238b5180bce4b245c13d6face848faa1a7a9e5c6e292025004f2fd619a \
|
||||
--hash=sha256:d8b2bdff7bbf45bc975c113a03b14fd669dc0c73e1327f02706666a7dd51a197 \
|
||||
--hash=sha256:dd05c768c2b55ffe56a9d49ce6cc77cdf3d53dbfad935258a9e347cbfd9a5850 \
|
||||
--hash=sha256:fc43f2d7c438517ee33e929e8ae77132749c15909afab6aeece5fcf4147ffdb5
|
||||
# via
|
||||
# -r requirements.in
|
||||
# pyqt5
|
||||
# pyqtwebengine
|
||||
pyqtwebengine==5.15.5 \
|
||||
--hash=sha256:30cebf455406990d5a0b859eac261ba6b45c32ce18956271733e0e96dbdca9b7 \
|
||||
--hash=sha256:5c77f71d88d871bc7400c68ef6433fadc5bd57b86d1a9c4d8094cea42f3607f1 \
|
||||
--hash=sha256:782aeee6bc8699bc029fe5c169a045c2bc9533d781cf3f5e9fb424b85a204e68 \
|
||||
--hash=sha256:ab47608dccf2b5e4b950d5a3cc704b17711af035024d07a9b71ad29fc103b941 \
|
||||
--hash=sha256:b827ad7ba0a65d5cd176797478f0ec8f599df6746b06c548649ff5674482a022
|
||||
# via -r requirements.in
|
||||
pyqtwebengine-qt5==5.15.2 \
|
||||
--hash=sha256:24231f19e1595018779977de6722b5c69f3d03f34a5f7574ff21cd1e764ef76d \
|
||||
--hash=sha256:9e80b408d8de09d4e708d5d84c3ceaf3603292ff8f5e566ae44bb0320fa59c33 \
|
||||
--hash=sha256:bc7b1fd1f4f8138d59b0b0245d601fb2c5c0aa1e1e7e853b713e52a3165d147e \
|
||||
--hash=sha256:ec2acb1780c0124ef060c310e00ca701f388d8b6c35bba9127f7a6f0dc536f77
|
||||
# via pyqtwebengine
|
|
@ -1,5 +0,0 @@
|
|||
pyqt6==6.6.1
|
||||
pyqt6-qt6==6.6.2
|
||||
pyqt6-webengine==6.6.0
|
||||
pyqt6-webengine-qt6==6.6.2
|
||||
pyqt6_sip==13.6.0
|
|
@ -1,56 +0,0 @@
|
|||
pyqt6==6.6.1 \
|
||||
--hash=sha256:03a656d5dc5ac31b6a9ad200f7f4f7ef49fa00ad7ce7a991b9bb691617141d12 \
|
||||
--hash=sha256:5aa0e833cb5a79b93813f8181d9f145517dd5a46f4374544bcd1e93a8beec537 \
|
||||
--hash=sha256:6b43878d0bbbcf8b7de165d305ec0cb87113c8930c92de748a11c473a6db5085 \
|
||||
--hash=sha256:9f158aa29d205142c56f0f35d07784b8df0be28378d20a97bcda8bd64ffd0379
|
||||
# via
|
||||
# -r requirements.qt6_6.in
|
||||
# pyqt6-webengine
|
||||
pyqt6-qt6==6.6.2 \
|
||||
--hash=sha256:5a41fe9d53b9e29e9ec5c23f3c5949dba160f90ca313ee8b96b8ffe6a5059387 \
|
||||
--hash=sha256:7ef446d3ffc678a8586ff6dc9f0d27caf4dff05dea02c353540d2f614386faf9 \
|
||||
--hash=sha256:8d7f674a4ec43ca00191e14945ca4129acbe37a2172ed9d08214ad58b170bc11 \
|
||||
--hash=sha256:b8363d88623342a72ac17da9127dc12f259bb3148796ea029762aa2d499778d9
|
||||
# via
|
||||
# -r requirements.qt6_6.in
|
||||
# pyqt6
|
||||
pyqt6-sip==13.6.0 \
|
||||
--hash=sha256:0dfd22cfedd87e96f9d51e0778ca2ba3dc0be83e424e9e0f98f6994d8d9c90f0 \
|
||||
--hash=sha256:13885361ca2cb2f5085d50359ba61b3fabd41b139fb58f37332acbe631ef2357 \
|
||||
--hash=sha256:24441032a29791e82beb7dfd76878339058def0e97fdb7c1cea517f3a0e6e96b \
|
||||
--hash=sha256:2486e1588071943d4f6657ba09096dc9fffd2322ad2c30041e78ea3f037b5778 \
|
||||
--hash=sha256:3075d8b325382750829e6cde6971c943352309d35768a4d4da0587459606d562 \
|
||||
--hash=sha256:33ea771fe777eb0d1a2c3ef35bcc3f7a286eb3ff09cd5b2fdd3d87d1f392d7e8 \
|
||||
--hash=sha256:39854dba35f8e5a4288da26ecb5f40b4c5ec1932efffb3f49d5ea435a7f37fb3 \
|
||||
--hash=sha256:3bf03e130fbfd75c9c06e687b86ba375410c7a9e835e4e03285889e61dd4b0c4 \
|
||||
--hash=sha256:43fb8551796030aae3d66d6e35e277494071ec6172cd182c9569ab7db268a2f5 \
|
||||
--hash=sha256:58f68a48400e0b3d1ccb18090090299bad26e3aed7ccb7057c65887b79b8aeea \
|
||||
--hash=sha256:5b9c6b6f9cfccb48cbb78a59603145a698fb4ffd176764d7083e5bf47631d8df \
|
||||
--hash=sha256:747f6ca44af81777a2c696bd501bc4815a53ec6fc94d4e25830e10bc1391f8ab \
|
||||
--hash=sha256:86a7b67c64436e32bffa9c28c9f21bf14a9faa54991520b12c3f6f435f24df7f \
|
||||
--hash=sha256:8c282062125eea5baf830c6998587d98c50be7c3a817a057fb95fef647184012 \
|
||||
--hash=sha256:8f9df9f7ccd8a9f0f1d36948c686f03ce1a1281543a3e636b7b7d5e086e1a436 \
|
||||
--hash=sha256:98bf954103b087162fa63b3a78f30b0b63da22fd6450b610ec1b851dbb798228 \
|
||||
--hash=sha256:9adf672f9114687533a74d5c2d4c03a9a929ad5ad9c3e88098a7da1a440ab916 \
|
||||
--hash=sha256:a6ce80bc24618d8a41be8ca51ad9f10e8bc4296dd90ab2809573df30a23ae0e5 \
|
||||
--hash=sha256:d6b5f699aaed0ac1fcd23e8fbca70d8a77965831b7c1ce474b81b1678817a49d \
|
||||
--hash=sha256:fa759b6339ff7e25f9afe2a6b651b775f0a36bcb3f5fa85e81a90d3b033c83f4 \
|
||||
--hash=sha256:fa7b10af7488efc5e53b41dd42c0f421bde6c2865a107af7ae259aff9d841da9
|
||||
# via
|
||||
# -r requirements.qt6_6.in
|
||||
# pyqt6
|
||||
# pyqt6-webengine
|
||||
pyqt6-webengine==6.6.0 \
|
||||
--hash=sha256:9d542738ed6e11c1978ce59035c07627def7c63eef0f59581d327f01209141bc \
|
||||
--hash=sha256:cb7793f06525ca054fcc6039afd93e23b82228b880d0b1301ce635f7f3ed2edf \
|
||||
--hash=sha256:d50b984c3f85e409e692b156132721522d4e8cf9b6c25e0cf927eea2dfb39487 \
|
||||
--hash=sha256:fded35fba636c4916fec84aa7c6840ad2e75d211462feb3e966f9545a59d56e6
|
||||
# via -r requirements.qt6_6.in
|
||||
pyqt6-webengine-qt6==6.6.2 \
|
||||
--hash=sha256:27b1b6a6f4ea115b3dd300d2df906d542009d9eb0e62b05e6b7cb85dfe68e9c3 \
|
||||
--hash=sha256:3da4db9ddd984b647d0b79fa10fc6cf65364dfe283cd702b12cb7164be2307cd \
|
||||
--hash=sha256:5d6f3ae521115cee77fea22b0248e7b219995390b951b51e4d519aef9c304ca8 \
|
||||
--hash=sha256:f2364dfa3a6e751ead71b7ba759081be677fcf1c6bbd8a2a2a250eb5f06432e8
|
||||
# via
|
||||
# -r requirements.qt6_6.in
|
||||
# pyqt6-webengine
|
|
@ -1,5 +0,0 @@
|
|||
pyqt6==6.8.0
|
||||
pyqt6-qt6==6.8.1
|
||||
pyqt6-webengine==6.8.0
|
||||
pyqt6-webengine-qt6==6.8.1
|
||||
pyqt6_sip==13.9.1
|
|
@ -1,71 +0,0 @@
|
|||
pyqt6==6.8.0 \
|
||||
--hash=sha256:3a4354816f11e812b727206a9ea6e79ff3774f1bb7228ad4b9318442d2c64ff9 \
|
||||
--hash=sha256:452bae5840077bf0f146c798d7777f70d7bdd0c7dcfa9ee7a415c1daf2d10038 \
|
||||
--hash=sha256:48bace7b87676bba5e6114482f3a20ca20be90c7f261b5d340464313f5f2bf5e \
|
||||
--hash=sha256:6d8628de4c2a050f0b74462e4c9cb97f839bf6ffabbca91711722ffb281570d9 \
|
||||
--hash=sha256:8c5c05f5fdff31a5887dbc29b27615b09df467631238d7b449283809ffca6228 \
|
||||
--hash=sha256:a9913d479f1ffee804bf7f232079baea4fb4b221a8f4890117588917a54ea30d \
|
||||
--hash=sha256:cf7123caea14e7ecf10bd12cae48e8d9970ef7caf627bc7d7988b0baa209adb3
|
||||
# via
|
||||
# -r requirements.qt6_8.in
|
||||
# pyqt6-webengine
|
||||
pyqt6-qt6==6.8.1 \
|
||||
--hash=sha256:006d786693d0511fbcf184a862edbd339c6ed1bb3bd9de363d73a19ed4b23dff \
|
||||
--hash=sha256:08065d595f1e6fc2dde9f4450eeff89082f4bad26f600a8e9b9cc5966716bfcf \
|
||||
--hash=sha256:1eb8460a1fdb38d0b2458c2974c01d471c1e59e4eb19ea63fc447aaba3ad530e \
|
||||
--hash=sha256:20843cb86bd94942d1cd99e39bf1aeabb875b241a35a8ab273e4bbbfa63776db \
|
||||
--hash=sha256:2f4b8b55b1414b93f340f22e8c88d25550efcdebc4b65a3927dd947b73bd4358 \
|
||||
--hash=sha256:98aa99fe38ae68c5318284cd28f3479ba538c40bf6ece293980abae0925c1b24 \
|
||||
--hash=sha256:9f3790c4ce4dc576e48b8718d55fb8743057e6cbd53a6ca1dd253ffbac9b7287 \
|
||||
--hash=sha256:a8bc2ed4ee5e7c6ff4dd1c7db0b27705d151fee5dc232bbd1bf17618f937f515 \
|
||||
--hash=sha256:d6ca5d2b9d2ec0ee4a814b2175f641a5c4299cb80b45e0f5f8356632663f89b3
|
||||
# via
|
||||
# -r requirements.qt6_8.in
|
||||
# pyqt6
|
||||
pyqt6-sip==13.9.1 \
|
||||
--hash=sha256:14f95c6352e3b85dc26bf59cfbf77a470ecbd5fcdcf00af4b648f0e1b9eefb9e \
|
||||
--hash=sha256:15be741d1ae8c82bb7afe9a61f3cf8c50457f7d61229a1c39c24cd6e8f4d86dc \
|
||||
--hash=sha256:1d322ded1d1fea339cc6ac65b768e72c69c486eebb7db6ccde061b5786d74cc5 \
|
||||
--hash=sha256:1ec52e962f54137a19208b6e95b6bd9f7a403eb25d7237768a99306cd9db26d1 \
|
||||
--hash=sha256:1fb405615970e85b622b13b4cad140ff1e4182eb8334a0b27a4698e6217b89b0 \
|
||||
--hash=sha256:22d66256b800f552ade51a463510bf905f3cb318aae00ff4288fae4de5d0e600 \
|
||||
--hash=sha256:2ab85aaf155828331399c59ebdd4d3b0358e42c08250e86b43d56d9873df148a \
|
||||
--hash=sha256:3c269052c770c09b61fce2f2f9ea934a67dfc65f443d59629b4ccc8f89751890 \
|
||||
--hash=sha256:5004514b08b045ad76425cf3618187091a668d972b017677b1b4b193379ef553 \
|
||||
--hash=sha256:552ff8fdc41f5769d3eccc661f022ed496f55f6e0a214c20aaf56e56385d61b6 \
|
||||
--hash=sha256:5643c92424fe62cb0b33378fef3d28c1525f91ada79e8a15bd9a05414a09503d \
|
||||
--hash=sha256:56ce0afb19cd8a8c63ff93ae506dffb74f844b88adaa6673ebc0dec43af48a76 \
|
||||
--hash=sha256:57b5312ef13c1766bdf69b317041140b184eb24a51e1e23ce8fc5386ba8dffb2 \
|
||||
--hash=sha256:5d7726556d1ca7a7ed78e19ba53285b64a2a8f6ad7ff4cb18a1832efca1a3102 \
|
||||
--hash=sha256:69a879cfc94f4984d180321b76f52923861cd5bf4969aa885eef7591ee932517 \
|
||||
--hash=sha256:6e6c1e2592187934f4e790c0c099d0033e986dcef7bdd3c06e3895ffa995e9fc \
|
||||
--hash=sha256:8b2ac36d6e04db6099614b9c1178a2f87788c7ffc3826571fb63d36ddb4c401d \
|
||||
--hash=sha256:8c207528992d59b0801458aa6fcff118e5c099608ef0fc6ff8bccbdc23f29c04 \
|
||||
--hash=sha256:976c7758f668806d4df7a8853f390ac123d5d1f73591ed368bdb8963574ff589 \
|
||||
--hash=sha256:accab6974b2758296400120fdcc9d1f37785b2ea2591f00656e1776f058ded6c \
|
||||
--hash=sha256:c1942e107b0243ced9e510d507e0f27aeea9d6b13e0a1b7c06fd52a62e0d41f7 \
|
||||
--hash=sha256:c800db3464481e87b1d2b84523b075df1e8fc7856c6f9623dc243f89be1cb604 \
|
||||
--hash=sha256:e996d320744ca8342cad6f9454345330d4f06bce129812d032bda3bad6967c5c \
|
||||
--hash=sha256:fa27b51ae4c7013b3700cf0ecf46907d1333ae396fc6511311920485cbce094b
|
||||
# via
|
||||
# -r requirements.qt6_8.in
|
||||
# pyqt6
|
||||
# pyqt6-webengine
|
||||
pyqt6-webengine==6.8.0 \
|
||||
--hash=sha256:5b5090dcc71dd36172ca8370db7dcaadfa0a022a8e58f6e172301289036c666b \
|
||||
--hash=sha256:5b9231b58014965b72504e49f39a6dbc3ecd05d4d725af011d75e6c8a7e2d5f7 \
|
||||
--hash=sha256:64045ea622b6a41882c2b18f55ae9714b8660acff06a54e910eb72822c2f3ff2 \
|
||||
--hash=sha256:c549f0f72c285eeea94000f6764dfaebf6bb3b13224580c7169a409bf1bf1bb7 \
|
||||
--hash=sha256:c7a5731923112acf23fbf93efad91f7b1545221063572106273e34c15a029fe7 \
|
||||
--hash=sha256:d7366809d681bcc096fa565f2a81d0ab040f7da5bb4f12f78e834a2b173c87d1
|
||||
# via -r requirements.qt6_8.in
|
||||
pyqt6-webengine-qt6==6.8.1 \
|
||||
--hash=sha256:0405b6ce35f406affb27547c6c3608dc82405568af71505fefae4081c8b4ac39 \
|
||||
--hash=sha256:0ced2a10433da2571cfa29ed882698e0e164184d54068d17ba73799c45af5f0f \
|
||||
--hash=sha256:79f67a459ecb452f865e04f19122a1d6f30c83d9a1ffd06e7e6f0d652204083a \
|
||||
--hash=sha256:8059118591641cc9da6616343d893c77fbd065bef3e0764679543345e2c75123 \
|
||||
--hash=sha256:a375dbb34e03707b0ab4830b61e4d77a31dc3ef880421c8936472f2af34a3f80 \
|
||||
--hash=sha256:e36574aa55b30633a12aa000835f01e488a0f0c13513fd9a0d50c2281e0a9068
|
||||
# via
|
||||
# -r requirements.qt6_8.in
|
||||
# pyqt6-webengine
|
|
@ -1,2 +0,0 @@
|
|||
pywin32
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
pywin32==305 \
|
||||
--hash=sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d \
|
||||
--hash=sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1 \
|
||||
--hash=sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2 \
|
||||
--hash=sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990 \
|
||||
--hash=sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116 \
|
||||
--hash=sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863 \
|
||||
--hash=sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db \
|
||||
--hash=sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271 \
|
||||
--hash=sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7 \
|
||||
--hash=sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478 \
|
||||
--hash=sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4 \
|
||||
--hash=sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918 \
|
||||
--hash=sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504 \
|
||||
--hash=sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496
|
||||
# via -r requirements.win.in
|
|
@ -1,25 +0,0 @@
|
|||
#!/bin/bash
# Regenerate the pinned requirements files with pip-compile.
#
# Usage:
#   ./update.sh          re-pin without upgrading anything
#   ./update.sh all      upgrade every package
#   ./update.sh PKG      upgrade a single package

set -e

if [ "$1" == "all" ]; then
    upgrade="--upgrade"
elif [ "$1" != "" ]; then
    upgrade="--upgrade-package $1"
else
    upgrade=""
fi

args="--resolver=backtracking --allow-unsafe --no-header --strip-extras --generate-hashes"

# initial pyenv bootstrap
../out/pyenv/bin/pip-compile $args $upgrade requirements.base.in

# during build/development/testing
../out/pyenv/bin/pip-compile $args $upgrade requirements.dev.in

# during bundle: the glob below already includes requirements.bundle.in,
# so a separate standalone compile of it would just do the work twice.
for i in requirements.{bundle,qt6*}.in; do
    ../out/pyenv/bin/pip-compile $args $upgrade "$i"
done
|
||||
|
||||
|
|
@ -1 +0,0 @@
|
|||
..\out\pyenv\scripts\pip-compile --resolver=backtracking --allow-unsafe --no-header --strip-extras --generate-hashes requirements.win.in
|
10
python/version.py
Normal file
10
python/version.py
Normal file
|
@ -0,0 +1,10 @@
|
|||
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

"""Version helper for wheel builds."""

import pathlib

# The project version lives in a .version file at the repository root,
# one directory above this package; strip trailing whitespace/newline.
__version__ = (pathlib.Path(__file__).parent.parent / ".version").read_text().strip()
|
|
@ -1,191 +0,0 @@
|
|||
# Based on https://github.com/ziglang/zig-pypi/blob/de14cf728fa35c014821f62a4fa9abd9f4bb560e/make_wheels.py
|
||||
# MIT
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from email.message import EmailMessage
|
||||
from pathlib import Path
|
||||
from typing import Sequence
|
||||
from zipfile import ZIP_DEFLATED, ZipInfo
|
||||
|
||||
from wheel.wheelfile import WheelFile
|
||||
|
||||
def make_message(headers, payload=None):
    """Render an RFC-822 style metadata block (METADATA/WHEEL) as bytes.

    The pseudo-header "_dependencies" expands into Requires-Dist lines;
    an ExtraRequires entry additionally emits a Provides-Extra marker and
    qualifies each of its requirements with the extra name. List values
    for ordinary headers produce one header line per element.
    """
    msg = EmailMessage()
    for key, val in headers.items():
        if key == "_dependencies":
            for dep in val:
                if isinstance(dep, ExtraRequires):
                    msg["Provides-Extra"] = dep.name
                    for inner_dep in dep.deps:
                        msg["Requires-Dist"] = f"{inner_dep}; extra == '{dep.name}'"
                else:
                    msg["Requires-Dist"] = dep
        elif isinstance(val, list):
            for part in val:
                msg[key] = part
        else:
            msg[key] = val
    if payload:
        msg.set_payload(payload)
    # EmailMessage wraps the long license classifier line, which would
    # produce an invalid metadata file; undo that specific wrap.
    raw = bytes(msg)
    return raw.replace(b"License v3 or\n later", b"License v3 or later")
|
||||
|
||||
|
||||
def write_wheel_file(filename, contents):
    """Write *contents* (a mapping of ZipInfo-or-path-string to bytes) into
    a wheel archive at *filename*, returning *filename*.

    Every entry is stored deflated, with its size recorded and permissions
    forced to a regular 0644 file.
    """
    with WheelFile(filename, "w") as wheel:
        for info, data in contents.items():
            zinfo = info if isinstance(info, ZipInfo) else ZipInfo(info)
            zinfo.external_attr = 0o644 << 16  # rw-r--r-- regular file
            zinfo.file_size = len(data)
            zinfo.compress_type = ZIP_DEFLATED
            wheel.writestr(zinfo, bytes(data))
    return filename
|
||||
|
||||
|
||||
def write_wheel(
    wheel_path,
    *,
    name,
    version,
    tag,
    metadata,
    description,
    contents,
    entrypoints: list[str] | None = None,
    top_level: list[str] | None = None,
):
    """Assemble a complete wheel at *wheel_path*.

    Combines *contents* (package files) with generated dist-info members:
    optional entry_points.txt / top_level.txt, plus METADATA (built from
    *metadata* and *description*) and the WHEEL file for *tag*.
    """
    dist_info = f"{name}-{version}.dist-info"
    generated = {}
    if entrypoints:
        joined = "\n".join(entrypoints)
        generated[f"{dist_info}/entry_points.txt"] = f"[console_scripts]\n{joined}".encode("utf8")
    if top_level:
        generated[f"{dist_info}/top_level.txt"] = ("\n".join(top_level) + "\n").encode("utf8")
    generated[f"{dist_info}/METADATA"] = make_message(
        {
            "Metadata-Version": "2.1",
            "Name": name,
            "Version": version,
            **metadata,
        },
        description,
    )
    generated[f"{dist_info}/WHEEL"] = make_message(
        {
            "Wheel-Version": "1.0",
            "Generator": "anki write_wheel.py",
            "Root-Is-Purelib": "false",
            "Tag": tag,
        }
    )
    # Package files first, then generated dist-info entries.
    return write_wheel_file(wheel_path, {**contents, **generated})
|
||||
|
||||
|
||||
def merge_sources(contents, root, exclude):
    """Add every file under *root* to *contents*, keyed by its path
    relative to root's parent directory.

    Directories, compiled .pyc files, and any path for which
    exclude(path) is true are skipped.
    """
    base = Path(root)
    for entry in base.glob("**/*"):
        if entry.is_dir() or exclude(entry):
            continue
        rel = str(entry.relative_to(base.parent))
        if rel.endswith(".pyc"):
            continue
        contents[rel] = entry.read_bytes()
|
||||
|
||||
|
||||
def split_wheel_path(path: str):
    """Split a wheel filename stem into its first three dash-separated
    parts: [name, version, remaining-tag]."""
    stem = Path(path).stem
    return stem.split("-", maxsplit=2)
|
||||
|
||||
|
||||
class ExtraRequires:
    """A named extras group (e.g. 'qt') and the requirement strings it
    pulls in; consumed by make_message when expanding _dependencies."""

    def __init__(self, name, deps):
        # Extra name used for Provides-Extra / the extra == '...' marker.
        self.name = name
        # List of requirement specifier strings belonging to this extra.
        self.deps = deps
|
||||
|
||||
|
||||
# Command line: source tree, generated-files tree, output wheel path.
src_root, generated_root, wheel_path = sys.argv[1], sys.argv[2], sys.argv[3]

# The wheel filename itself encodes distribution name, version and tag.
name, version, tag = split_wheel_path(wheel_path)
|
||||
|
||||
|
||||
def exclude_aqt(path: Path) -> bool:
    """Return True for aqt sources that must not ship in the wheel:
    UI/stylesheet/sourcemap/TypeScript inputs, tsconfig files, and
    anything under the bundled aqt/data tree."""
    if path.suffix in (".ui", ".scss", ".map", ".ts"):
        return True
    if path.name.startswith("tsconfig"):
        return True
    return "/aqt/data" in str(path)
|
||||
|
||||
|
||||
def exclude_nothing(path: Path) -> bool:
    """Exclusion predicate that keeps every file."""
    return False
|
||||
|
||||
|
||||
def extract_requirements(path: Path) -> list[str]:
    """Read a requirements .in file and return its lines as a list."""
    return path.read_text().splitlines()
|
||||
|
||||
|
||||
# aqt wheels omit build inputs and bundled web data; anki ships everything.
exclude = exclude_aqt if name == "aqt" else exclude_nothing

contents: dict[str, str] = {}
merge_sources(contents, src_root, exclude)
merge_sources(contents, generated_root, exclude)

all_requires: Sequence[str | ExtraRequires]

if name == "anki":
    all_requires = extract_requirements(Path("python/requirements.anki.in"))
    entrypoints = None
    top_level = None
else:
    # aqt additionally depends on the matching anki version and Qt6.
    all_requires = extract_requirements(Path("python/requirements.aqt.in")) + [
        "anki==" + version,
        "pyqt6>=6.2",
        "pyqt6-webengine>=6.2",
    ]
    entrypoints = ["anki = aqt:run"]
    top_level = ["aqt", "_aqt"]

# Pin timestamps so repeated builds produce identical wheels.
os.environ["SOURCE_DATE_EPOCH"] = "0"

write_wheel(
    wheel_path,
    name=name,
    version=version,
    tag=tag,
    metadata={
        "License": "AGPL-3",
        "Classifier": [
            "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
        ],
        "Requires-Python": ">=3.9",
        "_dependencies": all_requires,
    },
    description="Please see https://apps.ankiweb.net\n\n",
    contents=contents,
    entrypoints=entrypoints,
    top_level=top_level,
)
|
|
@ -5,10 +5,16 @@ from __future__ import annotations
|
|||
|
||||
import atexit
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, Any, Union, cast
|
||||
|
||||
if "ANKI_FIRST_RUN" in os.environ:
|
||||
from .package import first_run_setup
|
||||
|
||||
first_run_setup()
|
||||
|
||||
try:
|
||||
import pip_system_certs.wrapt_requests
|
||||
except ModuleNotFoundError:
|
||||
|
@ -32,24 +38,14 @@ if "--syncserver" in sys.argv:
|
|||
from anki.syncserver import run_sync_server
|
||||
from anki.utils import is_mac
|
||||
|
||||
from .package import _fix_protobuf_path
|
||||
|
||||
if is_mac and getattr(sys, "frozen", False):
|
||||
_fix_protobuf_path()
|
||||
|
||||
# does not return
|
||||
run_sync_server()
|
||||
|
||||
from .package import packaged_build_setup
|
||||
|
||||
packaged_build_setup()
|
||||
|
||||
import argparse
|
||||
import builtins
|
||||
import cProfile
|
||||
import getpass
|
||||
import locale
|
||||
import os
|
||||
import tempfile
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
|
@ -270,13 +266,7 @@ def setupLangAndBackend(
|
|||
# load qt translations
|
||||
_qtrans = QTranslator()
|
||||
|
||||
if is_mac and getattr(sys, "frozen", False):
|
||||
qt_dir = os.path.join(sys.prefix, "../Resources/qt_translations")
|
||||
else:
|
||||
if qtmajor == 5:
|
||||
qt_dir = QLibraryInfo.location(QLibraryInfo.TranslationsPath) # type: ignore
|
||||
else:
|
||||
qt_dir = QLibraryInfo.path(QLibraryInfo.LibraryPath.TranslationsPath)
|
||||
qt_dir = QLibraryInfo.path(QLibraryInfo.LibraryPath.TranslationsPath)
|
||||
qt_lang = lang.replace("-", "_")
|
||||
if _qtrans.load(f"qtbase_{qt_lang}", qt_dir):
|
||||
app.installTranslator(_qtrans)
|
||||
|
@ -294,7 +284,7 @@ def setupLangAndBackend(
|
|||
class NativeEventFilter(QAbstractNativeEventFilter):
|
||||
def nativeEventFilter(
|
||||
self, eventType: Any, message: Any
|
||||
) -> tuple[bool, sip.voidptr | None]:
|
||||
) -> tuple[bool, Any | None]:
|
||||
|
||||
if eventType == "windows_generic_MSG":
|
||||
import ctypes.wintypes
|
||||
|
@ -386,6 +376,8 @@ class AnkiApp(QApplication):
|
|||
|
||||
def onRecv(self) -> None:
|
||||
sock = self._srv.nextPendingConnection()
|
||||
if sock is None:
|
||||
return
|
||||
if not sock.waitForReadyRead(self.TMOUT):
|
||||
sys.stderr.write(sock.errorString())
|
||||
return
|
||||
|
@ -416,14 +408,12 @@ class AnkiApp(QApplication):
|
|||
QRadioButton,
|
||||
QMenu,
|
||||
QSlider,
|
||||
# classes with PyQt5 compatibility proxy
|
||||
without_qt5_compat_wrapper(QToolButton),
|
||||
without_qt5_compat_wrapper(QTabBar),
|
||||
QToolButton,
|
||||
QTabBar,
|
||||
)
|
||||
if evt.type() in [QEvent.Type.Enter, QEvent.Type.HoverEnter]:
|
||||
if (isinstance(src, pointer_classes) and src.isEnabled()) or (
|
||||
isinstance(src, without_qt5_compat_wrapper(QComboBox))
|
||||
and not src.isEditable()
|
||||
isinstance(src, QComboBox) and not src.isEditable()
|
||||
):
|
||||
self.setOverrideCursor(QCursor(Qt.CursorShape.PointingHandCursor))
|
||||
else:
|
||||
|
@ -535,15 +525,12 @@ def setupGL(pm: aqt.profiles.ProfileManager) -> None:
|
|||
QQuickWindow.setGraphicsApi(QSGRendererInterface.GraphicsApi.OpenGL)
|
||||
elif driver in (VideoDriver.Software, VideoDriver.ANGLE):
|
||||
if is_win:
|
||||
# on Windows, this appears to be sufficient on Qt5/Qt6.
|
||||
# on Windows, this appears to be sufficient
|
||||
# On Qt6, ANGLE is excluded by the enum.
|
||||
os.environ["QT_OPENGL"] = driver.value
|
||||
elif is_mac:
|
||||
QCoreApplication.setAttribute(Qt.ApplicationAttribute.AA_UseSoftwareOpenGL)
|
||||
elif is_lin:
|
||||
# Qt5 only
|
||||
os.environ["QT_XCB_FORCE_SOFTWARE_OPENGL"] = "1"
|
||||
# Required on Qt6
|
||||
if "QTWEBENGINE_CHROMIUM_FLAGS" not in os.environ:
|
||||
os.environ["QTWEBENGINE_CHROMIUM_FLAGS"] = "--disable-gpu"
|
||||
if qtmajor > 5:
|
||||
|
@ -607,14 +594,13 @@ def _run(argv: list[str] | None = None, exec: bool = True) -> AnkiApp | None:
|
|||
profiler = cProfile.Profile()
|
||||
profiler.enable()
|
||||
|
||||
packaged = getattr(sys, "frozen", False)
|
||||
x11_available = os.getenv("DISPLAY")
|
||||
wayland_configured = qtmajor > 5 and (
|
||||
os.getenv("QT_QPA_PLATFORM") == "wayland" or os.getenv("WAYLAND_DISPLAY")
|
||||
)
|
||||
wayland_forced = os.getenv("ANKI_WAYLAND")
|
||||
|
||||
if (packaged or is_gnome) and wayland_configured:
|
||||
if is_gnome and wayland_configured:
|
||||
if wayland_forced or not x11_available:
|
||||
# Work around broken fractional scaling in Wayland
|
||||
# https://bugreports.qt.io/browse/QTBUG-113574
|
||||
|
@ -674,12 +660,6 @@ def _run(argv: list[str] | None = None, exec: bool = True) -> AnkiApp | None:
|
|||
if is_win and "QT_QPA_PLATFORM" not in os.environ:
|
||||
os.environ["QT_QPA_PLATFORM"] = "windows:altgr"
|
||||
|
||||
# Disable sandbox on Qt5 PyPi/packaged builds, as it causes blank screens on modern
|
||||
# glibc versions. We check for specific patch versions, because distros may have
|
||||
# fixed the issue in their own Qt builds.
|
||||
if is_lin and qtfullversion in ([5, 15, 2], [5, 14, 1]):
|
||||
os.environ["QTWEBENGINE_DISABLE_SANDBOX"] = "1"
|
||||
|
||||
# create the app
|
||||
QCoreApplication.setApplicationName("Anki")
|
||||
QGuiApplication.setDesktopFileName("anki")
|
||||
|
|
|
@ -3,55 +3,11 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from collections.abc import Callable
|
||||
from ctypes import CDLL, CFUNCTYPE, c_bool, c_char_p
|
||||
|
||||
import aqt
|
||||
import aqt.utils
|
||||
|
||||
|
||||
class _MacOSHelper:
|
||||
def __init__(self) -> None:
|
||||
if getattr(sys, "frozen", False):
|
||||
path = os.path.join(sys.prefix, "libankihelper.dylib")
|
||||
else:
|
||||
path = os.path.join(
|
||||
aqt.utils.aqt_data_folder(), "lib", "libankihelper.dylib"
|
||||
)
|
||||
|
||||
self._dll = CDLL(path)
|
||||
self._dll.system_is_dark.restype = c_bool
|
||||
|
||||
def system_is_dark(self) -> bool:
|
||||
return self._dll.system_is_dark()
|
||||
|
||||
def set_darkmode_enabled(self, enabled: bool) -> bool:
|
||||
return self._dll.set_darkmode_enabled(enabled)
|
||||
|
||||
def start_wav_record(self, path: str, on_error: Callable[[str], None]) -> None:
|
||||
global _on_audio_error
|
||||
_on_audio_error = on_error
|
||||
self._dll.start_wav_record(path.encode("utf8"), _audio_error_callback)
|
||||
|
||||
def end_wav_record(self) -> None:
|
||||
"On completion, file should be saved if no error has arrived."
|
||||
self._dll.end_wav_record()
|
||||
|
||||
|
||||
# this must not be overwritten or deallocated
|
||||
@CFUNCTYPE(None, c_char_p) # type: ignore
|
||||
def _audio_error_callback(msg: str) -> None:
|
||||
if handler := _on_audio_error:
|
||||
handler(msg)
|
||||
|
||||
|
||||
_on_audio_error: Callable[[str], None] | None = None
|
||||
|
||||
macos_helper: _MacOSHelper | None = None
|
||||
if sys.platform == "darwin":
|
||||
try:
|
||||
macos_helper = _MacOSHelper()
|
||||
except Exception as e:
|
||||
print("macos_helper:", e)
|
||||
from anki_mac_helper import ( # pylint:disable=unused-import,import-error
|
||||
macos_helper,
|
||||
)
|
||||
else:
|
||||
macos_helper = None
|
||||
|
|
|
@ -1139,6 +1139,9 @@ class Browser(QMainWindow):
|
|||
dialog=dialog,
|
||||
),
|
||||
)
|
||||
if key := aqt.mw.pm.get_answer_key(ease):
|
||||
QShortcut(key, dialog, activated=btn.click) # type: ignore
|
||||
btn.setToolTip(tr.actions_shortcut_key(key))
|
||||
layout.addWidget(btn)
|
||||
|
||||
# Add cancel button
|
||||
|
|
|
@ -2,8 +2,6 @@
|
|||
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
import aqt
|
||||
import aqt.browser
|
||||
from aqt.browser.sidebar.item import SidebarItem
|
||||
|
@ -107,11 +105,11 @@ class SidebarModel(QAbstractItemModel):
|
|||
return self.sidebar._on_rename(index.internalPointer(), text)
|
||||
|
||||
def supportedDropActions(self) -> Qt.DropAction:
|
||||
return cast(Qt.DropAction, Qt.DropAction.MoveAction)
|
||||
return Qt.DropAction.MoveAction
|
||||
|
||||
def flags(self, index: QModelIndex) -> Qt.ItemFlag:
|
||||
if not index.isValid():
|
||||
return cast(Qt.ItemFlag, Qt.ItemFlag.ItemIsEnabled)
|
||||
return Qt.ItemFlag.ItemIsEnabled
|
||||
flags = (
|
||||
Qt.ItemFlag.ItemIsEnabled
|
||||
| Qt.ItemFlag.ItemIsSelectable
|
||||
|
|
|
@ -325,15 +325,13 @@ class DataModel(QAbstractTableModel):
|
|||
return 0
|
||||
return self.len_columns()
|
||||
|
||||
_QFont = without_qt5_compat_wrapper(QFont)
|
||||
|
||||
def data(self, index: QModelIndex = QModelIndex(), role: int = 0) -> Any:
|
||||
if not index.isValid():
|
||||
return QVariant()
|
||||
if role == Qt.ItemDataRole.FontRole:
|
||||
if not self.column_at(index).uses_cell_font:
|
||||
return QVariant()
|
||||
qfont = self._QFont()
|
||||
qfont = QFont()
|
||||
row = self.get_row(index)
|
||||
qfont.setFamily(row.font_name)
|
||||
qfont.setPixelSize(row.font_size)
|
||||
|
|
|
@ -382,10 +382,7 @@ class Table:
|
|||
hh.setContextMenuPolicy(Qt.ContextMenuPolicy.CustomContextMenu)
|
||||
self._restore_header()
|
||||
qconnect(hh.customContextMenuRequested, self._on_header_context)
|
||||
if qtmajor == 5:
|
||||
qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed_qt5)
|
||||
else:
|
||||
qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed)
|
||||
qconnect(hh.sortIndicatorChanged, self._on_sort_column_changed)
|
||||
qconnect(hh.sectionMoved, self._on_column_moved)
|
||||
|
||||
# Slots
|
||||
|
@ -495,12 +492,6 @@ class Table:
|
|||
if checked:
|
||||
self._scroll_to_column(self._model.len_columns() - 1)
|
||||
|
||||
def _on_sort_column_changed_qt5(self, section: int, order: int) -> None:
    """Qt5 compatibility shim: translate the int sort indicator to a
    Qt.SortOrder enum and delegate to _on_sort_column_changed."""
    sort_order = (
        Qt.SortOrder.DescendingOrder if order else Qt.SortOrder.AscendingOrder
    )
    self._on_sort_column_changed(section, sort_order)
|
||||
|
||||
def _on_sort_column_changed(self, section: int, order: Qt.SortOrder) -> None:
|
||||
column = self._model.column_at_section(section)
|
||||
sorting = column.sorting_notes if self.is_notes_mode() else column.sorting_cards
|
||||
|
|
|
@ -165,6 +165,7 @@ _mbox: QMessageBox | None = None
|
|||
|
||||
class ErrorHandler(QObject):
|
||||
"Catch stderr and write into buffer."
|
||||
|
||||
ivl = 100
|
||||
fatal_error_encountered = False
|
||||
|
||||
|
@ -235,6 +236,8 @@ class ErrorHandler(QObject):
|
|||
if "unable to get local issuer certificate" in error and is_win:
|
||||
showWarning(tr.errors_windows_ssl_updates())
|
||||
return
|
||||
if is_chromium_cert_error(error):
|
||||
return
|
||||
|
||||
debug_text = supportText() + "\n" + error
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue