Compare commits

main...24.06rc2

No commits in common. "main" and "24.06rc2" have entirely different histories.

871 changed files with 24434 additions and 39104 deletions


@@ -0,0 +1,72 @@
FROM debian:10-slim
ARG DEBIAN_FRONTEND="noninteractive"
RUN useradd -d /state -m -u 998 user
RUN apt-get update && apt install --yes gnupg ca-certificates && \
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 \
&& echo "deb https://apt.buildkite.com/buildkite-agent stable main" > /etc/apt/sources.list.d/buildkite-agent.list \
&& apt-get update \
&& apt-get install --yes --no-install-recommends \
autoconf \
bash \
buildkite-agent \
ca-certificates \
curl \
findutils \
g++ \
gcc \
git \
grep \
libdbus-1-3 \
libegl1 \
libfontconfig1 \
libgl1 \
libgstreamer-gl1.0-0 \
libgstreamer-plugins-base1.0 \
libgstreamer1.0-0 \
libnss3 \
libpulse-mainloop-glib0 \
libpulse-mainloop-glib0 \
libssl-dev \
libxcomposite1 \
libxcursor1 \
libxi6 \
libxkbcommon-x11-0 \
libxkbcommon0 \
libxkbfile1 \
libxrandr2 \
libxrender1 \
libxtst6 \
make \
pkg-config \
portaudio19-dev \
python3-dev \
rsync \
unzip \
zstd \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /etc/buildkite-agent/hooks && chown -R user /etc/buildkite-agent
COPY buildkite.cfg /etc/buildkite-agent/buildkite-agent.cfg
COPY environment /etc/buildkite-agent/hooks/environment
# Available in Debian 11 as ninja-build, but we're building with Debian 10
RUN curl -LO https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-linux.zip \
&& unzip ninja-linux.zip \
&& chmod +x ninja \
&& mv ninja /usr/bin \
&& rm ninja-linux.zip
RUN mkdir /state/rust && chown user /state/rust
USER user
ENV CARGO_HOME=/state/rust/cargo
ENV RUSTUP_HOME=/state/rust/rustup
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain none
WORKDIR /code/buildkite
ENTRYPOINT ["/usr/bin/buildkite-agent", "start"]
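For orientation, a minimal sketch of how an agent image built from a Dockerfile like this is used; this mirrors the run.sh changes later in this compare. The Dockerfile.amd64 name, the linci tag, and the token value are assumptions taken from that script:

    # build the image (run.sh selects Dockerfile.amd64 or Dockerfile.arm64 via uname -m)
    DOCKER_BUILDKIT=1 docker build -f Dockerfile.amd64 --tag linci .
    # run daemonized; the ENTRYPOINT starts buildkite-agent with the forwarded token
    BUILDKITE_AGENT_TOKEN=... docker run -d --restart always \
      --name linci -v ci-state:/state -e BUILDKITE_AGENT_TOKEN linci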


@@ -1,6 +1,7 @@
-FROM ubuntu:22.04
+FROM debian:11-slim
 ARG DEBIAN_FRONTEND="noninteractive"
+ENV PYTHONPATH=/usr/lib/python3/dist-packages
 RUN useradd -d /state -m -u 998 user
@@ -32,7 +33,6 @@ RUN apt-get update && apt install --yes gnupg ca-certificates && \
 libssl-dev \
 libxcomposite1 \
 libxcursor1 \
-libxdamage1 \
 libxi6 \
 libxkbcommon-x11-0 \
 libxkbcommon0 \
@@ -45,8 +45,12 @@ RUN apt-get update && apt install --yes gnupg ca-certificates && \
 portaudio19-dev \
 python3-dev \
 rsync \
-unzip \
-zstd \
+# -- begin only required for arm64/debian11
+ninja-build \
+clang-format \
+python-is-python3 \
+python3-pyqt5.qtwebengine \
+# -- end only required for arm64/debian11
 && rm -rf /var/lib/apt/lists/*
 RUN mkdir -p /etc/buildkite-agent/hooks && chown -R user /etc/buildkite-agent


@@ -1,11 +0,0 @@
#!/bin/bash
# builds an 'anki-[amd|arm]' image for the current platform
#
# for a cross-compile on recent Docker:
# docker buildx create --use
# docker run --privileged --rm tonistiigi/binfmt --install amd64
# docker buildx build --platform linux/amd64 --tag anki-amd64 . --load
. common.inc
DOCKER_BUILDKIT=1 docker build --tag anki-${platform} .


@@ -1,9 +0,0 @@
#!/bin/bash
set -e
if [[ "$(uname -m)" == "x86_64" ]]; then
platform="amd"
else
platform="arm"
fi


@@ -1,29 +1,33 @@
 #!/bin/bash
-# - use './run.sh' to run in the foreground
+# - use 'BUILD=1 ./run.sh' to build image & run.
 # - use './run.sh serve' to daemonize.
 set -e
-. common.inc
 if [ "$1" = "serve" ]; then
 extra_args="-d --restart always"
 else
 extra_args="-it"
 fi
-name=anki-${platform}
+if [ $(uname -m) = "aarch64" ]; then
+arch=arm64
+else
+arch=amd64
+fi
-# Stop and remove the existing container if it exists.
-# This doesn't delete the associated volume.
-if docker container inspect $name > /dev/null 2>&1; then
-docker stop $name || true
-docker container rm $name
+if [ -n "$BUILD" ]; then
+DOCKER_BUILDKIT=1 docker build -f Dockerfile.${arch} --tag linci .
+fi
+
+if docker container inspect linci > /dev/null 2>&1; then
+docker stop linci || true
+docker container rm linci
 fi
 docker run $extra_args \
---name $name \
--v ${name}-state:/state \
+--name linci \
+-v ci-state:/state \
 -e BUILDKITE_AGENT_TOKEN \
 -e BUILDKITE_AGENT_TAGS \
-$name
+linci


@@ -22,7 +22,7 @@ echo "--- Ensure libs importable"
 SKIP_RUN=1 ./run
 echo "--- Check Rust libs"
-cargo install cargo-deny --version 0.14.24
+cargo install cargo-deny --version 0.14.12
 cargo deny check
 echo "--- Cleanup"


@@ -7,12 +7,7 @@ export BUILD_ROOT=/state/build
 export RELEASE=2
 ln -sf out/node_modules .
-echo "--- Install n2"
-./tools/install-n2
-echo "+++ Building"
 if [ $(uname -m) = "aarch64" ]; then
-export PYTHONPATH=/usr/lib/python3/dist-packages
 ./ninja wheels:anki
 else
 ./ninja bundle


@@ -5,11 +5,7 @@ DESCRIPTORS_BIN = { value = "out/rslib/proto/descriptors.bin", relative = true }
 # build script will append .exe if necessary
 PROTOC = { value = "out/extracted/protoc/bin/protoc", relative = true }
 PYO3_NO_PYTHON = "1"
-MACOSX_DEPLOYMENT_TARGET = "11"
-PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows
+MACOSX_DEPLOYMENT_TARGET = "10.13.4"
 [term]
 color = "always"
-[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
-rustflags = ["-C", "target-feature=+crt-static"]


@@ -1,2 +0,0 @@
- To build and check the project, use ./check in the root folder (or check.bat on Windows)
- This will format files, then run lints and unit tests.


@@ -1,7 +0,0 @@
- We use the fluent system+code generation for translation.
- New strings should be added to rslib/core/. Ask for the appropriate file if you're not sure.
- Assuming a string addons-you-have-count has been added to addons.ftl, that string is accessible in our different languages as follows:
- Python: from aqt.utils import tr; msg = tr.addons_you_have_count(count=3)
- TypeScript: import * as tr from "@generated/ftl"; tr.addonsYouHaveCount({count: 3})
- Rust: collection.tr.addons_you_have_count(3)
- In Qt .ui files, strings that are marked as translatable will automatically use the registered ftl strings. So a QLabel with a title 'addons_you_have_count' that is marked as translatable will automatically use the translation defined in our addons.ftl file.


@@ -5,8 +5,8 @@
 db-path = "~/.cargo/advisory-db"
 db-urls = ["https://github.com/rustsec/advisory-db"]
 ignore = [
-# burn depends on an unmaintained package 'paste'
-"RUSTSEC-2024-0436",
+# safemem only used by makeapp
+"RUSTSEC-2023-0081",
 ]
 
 [licenses]
@@ -14,15 +14,15 @@ allow = [
 "MIT",
 "Apache-2.0",
 "Apache-2.0 WITH LLVM-exception",
-"CDLA-Permissive-2.0",
 "ISC",
 "MPL-2.0",
+"Unicode-DFS-2016",
 "BSD-2-Clause",
 "BSD-3-Clause",
+"OpenSSL",
 "CC0-1.0",
 "Unlicense",
 "Zlib",
-"Unicode-3.0",
 ]
 confidence-threshold = 0.8
 # eg { allow = ["Zlib"], name = "adler32", version = "*" },
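As a sketch of how this config is exercised locally (standard cargo-deny subcommands; the version pin matches the CI script above):

    cargo install cargo-deny --version 0.14.12
    cargo deny check advisories   # consults the RUSTSEC ignore list above
    cargo deny check licenses     # consults the [licenses] allow list above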


@@ -1,3 +0,0 @@
node_modules/
target/
out/


@@ -8,6 +8,13 @@
 },
 "markdown": {},
 "toml": {},
+"prettier": {
+"trailingComma": "all",
+"printWidth": 88,
+"tabWidth": 4,
+"semi": true,
+"htmlWhitespaceSensitivity": "ignore"
+},
 "includes": ["**/*.{ts,tsx,js,jsx,cjs,mjs,json,md,toml,svelte,scss}"],
 "excludes": [
 ".vscode",
@@ -20,17 +27,18 @@
 "ftl/usage",
 "licenses.json",
 ".dmypy.json",
+"qt/bundle/PyOxidizer",
 "target",
 ".mypy_cache",
 "extra",
-"ts/.svelte-kit",
-"ts/vite.config.ts.timestamp*"
+"ts/.svelte-kit"
 ],
 "plugins": [
-"https://plugins.dprint.dev/typescript-0.91.6.wasm",
-"https://plugins.dprint.dev/json-0.19.3.wasm",
-"https://plugins.dprint.dev/markdown-0.17.6.wasm",
-"https://plugins.dprint.dev/toml-0.6.2.wasm",
-"https://plugins.dprint.dev/disrupted/css-0.2.3.wasm"
+"https://plugins.dprint.dev/typescript-0.85.1.wasm",
+"https://plugins.dprint.dev/json-0.17.4.wasm",
+"https://plugins.dprint.dev/markdown-0.15.3.wasm",
+"https://plugins.dprint.dev/toml-0.5.4.wasm",
+"https://plugins.dprint.dev/prettier-0.13.0.json@dc5d12b7c1bf1a4683eff317c2c87350e75a5a3dfcc127f3d5628931bfb534b1",
+"https://plugins.dprint.dev/disrupted/css-0.2.2.wasm"
 ]
 }
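A quick usage sketch for this formatter config (standard dprint CLI; it discovers .dprint.json in the repo root):

    dprint check   # list files that would be reformatted
    dprint fmt     # rewrite them in place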


@@ -50,7 +50,7 @@ module.exports = {
 },
 ],
 env: { browser: true, es2020: true },
-ignorePatterns: ["backend_proto.d.ts", "*.svelte.d.ts", "vendor", "extra/*", "vite.config.ts", "hooks.client.js"],
+ignorePatterns: ["backend_proto.d.ts", "*.svelte.d.ts", "vendor", "extra/*", "vite.config.ts"],
 globals: {
 globalThis: false,
 NodeListOf: false,

.gitignore

@@ -1,5 +1,4 @@
 __pycache__
-.mypy_cache
 .DS_Store
 anki.prof
 target
@@ -17,6 +16,3 @@ node_modules
 /extra
 yarn-error.log
 ts/.svelte-kit
-.yarn
-.claude/settings.local.json
-.claude/user.md

.gitmodules

@@ -6,3 +6,9 @@
 path = ftl/qt-repo
 url = https://github.com/ankitects/anki-desktop-ftl.git
 shallow = true
+
+[submodule "qt/bundle/PyOxidizer"]
+path = qt/bundle/PyOxidizer
+url = https://github.com/ankitects/PyOxidizer.git
+shallow = true
+update = none

.isort.cfg

@@ -0,0 +1,10 @@
[settings]
ensure_newline_before_comments=true
force_grid_wrap=0
include_trailing_comma=True
known_first_party=anki,aqt,tests
line_length=88
multi_line_output=3
profile=black
skip=
use_parentheses=True
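A minimal usage sketch (isort reads .isort.cfg automatically; the pylib/qt paths are this repo's Python trees):

    python -m isort pylib qt                       # sort imports in place
    python -m isort --check-only --diff pylib qt   # CI-style verification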

.mypy.ini

@@ -1,15 +1,15 @@
 [mypy]
 python_version = 3.9
-pretty = False
-strict_optional = False
-show_error_codes = True
-check_untyped_defs = True
+pretty = false
+no_strict_optional = true
+show_error_codes = true
+check_untyped_defs = true
 disallow_untyped_decorators = True
 warn_redundant_casts = True
 warn_unused_configs = True
-strict_equality = True
-namespace_packages = True
-explicit_package_bases = True
+strict_equality = true
+namespace_packages = true
+explicit_package_bases = true
 mypy_path =
 pylib,
 out/pylib,
@@ -18,7 +18,7 @@ mypy_path =
 ftl,
 pylib/tools,
 python
-exclude = (pylib/anki/_vendor)
+exclude = (qt/bundle/PyOxidizer|pylib/anki/_vendor)
 [mypy-anki.*]
 disallow_untyped_defs = True
@@ -26,92 +26,10 @@ disallow_untyped_defs = True
 disallow_untyped_defs = False
 [mypy-anki.exporting]
 disallow_untyped_defs = False
-[mypy-aqt]
-strict_optional = True
-[mypy-aqt.browser.*]
-strict_optional = True
-[mypy-aqt.data.*]
-strict_optional = True
-[mypy-aqt.forms.*]
-strict_optional = True
-[mypy-aqt.import_export.*]
-strict_optional = True
 [mypy-aqt.operations.*]
-strict_optional = True
+no_strict_optional = false
-[mypy-aqt.editor]
-strict_optional = True
-[mypy-aqt.importing]
-strict_optional = True
-[mypy-aqt.preferences]
-strict_optional = True
-[mypy-aqt.overview]
-strict_optional = True
-[mypy-aqt.customstudy]
-strict_optional = True
-[mypy-aqt.taglimit]
-strict_optional = True
-[mypy-aqt.modelchooser]
-strict_optional = True
-[mypy-aqt.deckdescription]
-strict_optional = True
-[mypy-aqt.deckbrowser]
-strict_optional = True
-[mypy-aqt.studydeck]
-strict_optional = True
-[mypy-aqt.tts]
-strict_optional = True
-[mypy-aqt.mediasrv]
-strict_optional = True
-[mypy-aqt.changenotetype]
-strict_optional = True
-[mypy-aqt.clayout]
-strict_optional = True
-[mypy-aqt.fields]
-strict_optional = True
-[mypy-aqt.filtered_deck]
-strict_optional = True
-[mypy-aqt.editcurrent]
-strict_optional = True
-[mypy-aqt.deckoptions]
-strict_optional = True
-[mypy-aqt.notetypechooser]
-strict_optional = True
-[mypy-aqt.stats]
-strict_optional = True
-[mypy-aqt.switch]
-strict_optional = True
-[mypy-aqt.debug_console]
-strict_optional = True
-[mypy-aqt.emptycards]
-strict_optional = True
-[mypy-aqt.flags]
-strict_optional = True
-[mypy-aqt.mediacheck]
-strict_optional = True
-[mypy-aqt.theme]
-strict_optional = True
-[mypy-aqt.toolbar]
-strict_optional = True
-[mypy-aqt.deckchooser]
-strict_optional = True
-[mypy-aqt.about]
-strict_optional = True
-[mypy-aqt.webview]
-strict_optional = True
-[mypy-aqt.mediasync]
-strict_optional = True
-[mypy-aqt.package]
-strict_optional = True
-[mypy-aqt.progress]
-strict_optional = True
-[mypy-aqt.tagedit]
-strict_optional = True
-[mypy-aqt.utils]
-strict_optional = True
-[mypy-aqt.sync]
-strict_optional = True
 [mypy-anki.scheduler.base]
-strict_optional = True
+no_strict_optional = false
 [mypy-anki._backend.rsbridge]
 ignore_missing_imports = True
 [mypy-anki._vendor.stringcase]
@@ -119,10 +37,10 @@ disallow_untyped_defs = False
 [mypy-stringcase]
 ignore_missing_imports = True
 [mypy-aqt.mpv]
-disallow_untyped_defs = False
-ignore_errors = True
+disallow_untyped_defs=false
+ignore_errors=true
 [mypy-aqt.winpaths]
-disallow_untyped_defs = False
+disallow_untyped_defs=false
 [mypy-win32file]
 ignore_missing_imports = True
@@ -165,5 +83,3 @@ ignore_missing_imports = True
 ignore_missing_imports = True
 [mypy-pip_system_certs.*]
 ignore_missing_imports = True
-[mypy-anki_audio]
-ignore_missing_imports = True
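A sketch of running the checker against this config (mypy picks up .mypy.ini by default; the daemon variant matches the ./tools/dmypy helper mentioned in the CLAUDE.md diff below):

    python -m mypy pylib qt                # one-shot check
    python -m mypy.dmypy run -- pylib qt   # warm-daemon re-checks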


@@ -1,8 +0,0 @@
{
"trailingComma": "all",
"printWidth": 88,
"tabWidth": 4,
"semi": true,
"htmlWhitespaceSensitivity": "ignore",
"plugins": ["prettier-plugin-svelte"]
}

.pylintrc

@@ -0,0 +1,48 @@
[MASTER]
ignore-patterns=.*_pb2.*
persistent = no
extension-pkg-whitelist=orjson,PyQt6
init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])"
[REPORTS]
output-format=colorized
[MESSAGES CONTROL]
disable=
R,
line-too-long,
too-many-lines,
missing-function-docstring,
missing-module-docstring,
missing-class-docstring,
import-outside-toplevel,
wrong-import-position,
wrong-import-order,
fixme,
unused-wildcard-import,
attribute-defined-outside-init,
redefined-builtin,
wildcard-import,
broad-except,
bare-except,
unused-argument,
unused-variable,
redefined-outer-name,
global-statement,
protected-access,
arguments-differ,
arguments-renamed,
consider-using-f-string,
invalid-name,
broad-exception-raised
[BASIC]
good-names =
id,
tr,
db,
ok,
ip,
[IMPORTS]
ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2
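Usage sketch (pylint reads .pylintrc from the working directory; the init-hook above extends sys.path so vendored and generated modules resolve):

    python -m pylint pylib/anki qt/aqt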


@@ -1 +0,0 @@
3.13.5


@@ -1,91 +0,0 @@
lint.select = [
"E", # pycodestyle errors
"F", # Pyflakes errors
"PL", # Pylint rules
"I", # Isort rules
"ARG",
# "UP", # pyupgrade
# "B", # flake8-bugbear
# "SIM", # flake8-simplify
]
extend-exclude = ["*_pb2.py", "*_pb2.pyi"]
lint.ignore = [
# Docstring rules (missing-*-docstring in pylint)
"D100", # Missing docstring in public module
"D101", # Missing docstring in public class
"D103", # Missing docstring in public function
# Import rules (wrong-import-* in pylint)
"E402", # Module level import not at top of file
"E501", # Line too long
# pycodestyle rules
"E741", # ambiguous-variable-name
# Comment rules (fixme in pylint)
"FIX002", # Line contains TODO
# Pyflakes rules
"F402", # import-shadowed-by-loop-var
"F403", # undefined-local-with-import-star
"F405", # undefined-local-with-import-star-usage
# Naming rules (invalid-name in pylint)
"N801", # Class name should use CapWords convention
"N802", # Function name should be lowercase
"N803", # Argument name should be lowercase
"N806", # Variable in function should be lowercase
"N811", # Constant imported as non-constant
"N812", # Lowercase imported as non-lowercase
"N813", # Camelcase imported as lowercase
"N814", # Camelcase imported as constant
"N815", # Variable in class scope should not be mixedCase
"N816", # Variable in global scope should not be mixedCase
"N817", # CamelCase imported as acronym
"N818", # Error suffix in exception names
# Pylint rules
"PLW0603", # global-statement
"PLW2901", # redefined-loop-name
"PLC0415", # import-outside-top-level
"PLR2004", # magic-value-comparison
# Exception handling (broad-except, bare-except in pylint)
"BLE001", # Do not catch blind exception
# Argument rules (unused-argument in pylint)
"ARG001", # Unused function argument
"ARG002", # Unused method argument
"ARG005", # Unused lambda argument
# Access rules (protected-access in pylint)
"SLF001", # Private member accessed
# String formatting (consider-using-f-string in pylint)
"UP032", # Use f-string instead of format call
# Exception rules (broad-exception-raised in pylint)
"TRY301", # Abstract raise to an inner function
# Builtin shadowing (redefined-builtin in pylint)
"A001", # Variable shadows a Python builtin
"A002", # Argument shadows a Python builtin
"A003", # Class attribute shadows a Python builtin
]
[lint.per-file-ignores]
"**/anki/*_pb2.py" = ["ALL"]
[lint.pep8-naming]
ignore-names = ["id", "tr", "db", "ok", "ip"]
[lint.pylint]
max-args = 12
max-returns = 10
max-branches = 35
max-statements = 125
[lint.isort]
known-first-party = ["anki", "aqt", "tests"]
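On main, this file replaces the .pylintrc/.isort.cfg configs restored above, with ruff covering linting and import sorting in one tool. A usage sketch, assuming the file is read as ruff.toml:

    ruff check .         # apply the lint.select/lint.ignore lists above
    ruff check --fix .   # also apply safe autofixes, e.g. isort-style import sorting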


@@ -1 +1 @@
-25.09.2
+24.06


@@ -2,7 +2,7 @@
 "recommendations": [
 "dprint.dprint",
 "ms-python.python",
-"charliermarsh.ruff",
+"ms-python.black-formatter",
 "rust-lang.rust-analyzer",
 "svelte.svelte-vscode",
 "zxh404.vscode-proto3",


@@ -2,7 +2,7 @@
 "editor.formatOnSave": true,
 "[python]": {
 "editor.codeActionsOnSave": {
-"source.organizeImports": "explicit"
+"source.organizeImports": true
 }
 },
 "files.watcherExclude": {
@@ -18,12 +18,12 @@
 "out/qt",
 "qt"
 ],
-"python.formatting.provider": "charliermarsh.ruff",
+"python.formatting.provider": "black",
 "python.linting.mypyEnabled": false,
 "python.analysis.diagnosticSeverityOverrides": {
 "reportMissingModuleSource": "none"
 },
-"rust-analyzer.check.allTargets": false,
+"rust-analyzer.checkOnSave.allTargets": false,
 "rust-analyzer.files.excludeDirs": [".bazel", "node_modules"],
 "rust-analyzer.procMacro.enable": true,
 // this formats 'use' blocks in a nicer way, but requires you to run
@@ -31,13 +31,11 @@
 "rust-analyzer.rustfmt.extraArgs": ["+nightly"],
 "search.exclude": {
 "**/node_modules": true,
-".bazel/**": true
+".bazel/**": true,
+"qt/bundle/PyOxidizer": true
 },
 "rust-analyzer.cargo.buildScripts.enable": true,
 "python.analysis.typeCheckingMode": "off",
-"python.analysis.exclude": [
-"out/launcher/**"
-],
 "terminal.integrated.env.windows": {
 "PATH": "c:\\msys64\\usr\\bin;${env:Path}"
 }


@@ -1,2 +0,0 @@
nodeLinker: node-modules
enableScripts: false


@@ -1,86 +0,0 @@
# Claude Code Configuration
## Project Overview
Anki is a spaced repetition flashcard program with a multi-layered architecture. Main components:
- Web frontend: Svelte/TypeScript in ts/
- PyQt GUI, which embeds the web components in aqt/
- Python library which wraps our Rust layer (pylib/, with Rust module in pylib/rsbridge)
- Core Rust layer in rslib/
- Protobuf definitions in proto/ that are used by the different layers to
talk to each other.
## Building/checking
./check (check.bat) will format the code and run the main build & checks.
Please do this as a final step before marking a task as completed.
## Quick iteration
During development, you can build/check subsections of our code:
- Rust: 'cargo check'
- Python: './tools/dmypy', and if wheel-related, './ninja wheels'
- TypeScript/Svelte: './ninja check:svelte'
Be mindful that some changes (such as modifications to .proto files) may
need a full build with './check' first.
## Build tooling
'./check' and './ninja' invoke our build system, which is implemented in build/. It takes care of downloading required deps and invoking our build
steps.
## Translations
ftl/ contains our Fluent translation files. We have scripts in rslib/i18n
to auto-generate an API for Rust, TypeScript and Python so that our code can
access the translations in a type-safe manner. Changes should be made to
ftl/core or ftl/qt. Except for features specific to our Qt interface, prefer
the core module. When adding new strings, confirm the appropriate ftl file
first, and try to match the existing style.
## Protobuf and IPC
Our build scripts use the .proto files to define our Rust library's
non-Rust API. pylib/rsbridge exposes that API, and _backend.py exposes
snake_case methods for each protobuf RPC that call into the API.
Similar tooling creates a @generated/backend TypeScript module for
communicating with the Rust backend (which happens over POST requests).
## Fixing errors
When dealing with build errors or failing tests, invoke 'check' or one
of the quick iteration commands regularly. This helps verify your changes
are correct. To locate other instances of a problem, run the check again -
don't attempt to grep the codebase.
## Ignores
The files in out/ are auto-generated. Mostly you should ignore that folder,
though you may sometimes find it useful to view out/{pylib/anki,qt/_aqt,ts/lib/generated} when dealing with cross-language communication or our other generated sourcecode.
## Launcher/installer
The code for our launcher is in qt/launcher, with separate code for each
platform.
## Rust dependencies
Prefer adding to the root workspace, and using dep.workspace = true in the individual Rust project.
## Rust utilities
rslib/{process,io} contain some helpers for file and process operations,
which provide better error messages/context and some ergonomics. Use them
when possible.
## Rust error handling
in rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping
in build scripts/tests is fine.
## Individual preferences
See @.claude/user.md


@@ -32,8 +32,8 @@ AMBOSS MD Inc. <https://www.amboss.com/>
 Aristotelis P. <https://glutanimate.com/contact>
 Erez Volk <erez.volk@gmail.com>
 zjosua <zjosua@hotmail.com>
-Yngve Hoiseth <yngve@hoiseth.net>
 Arthur Milchior <arthur@milchior.fr>
+Yngve Hoiseth <yngve@hoiseth.net>
 Ijgnd
 Yoonchae Lee <bluegreenmagick@gmail.com>
 Evandro Coan <github.com/evandrocoan>
@@ -49,7 +49,6 @@ Sander Santema <github.com/sandersantema/>
 Thomas Brownback <https://github.com/brownbat/>
 Andrew Gaul <andrew@gaul.org>
 kenden
-Emil Hamrin <github.com/e-hamrin>
 Nickolay Yudin <kelciour@gmail.com>
 neitrinoweb <github.com/neitrinoweb/>
 Andreas Reis <github.com/nwwt>
@@ -64,7 +63,6 @@ Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>
 Akshara Balachandra <akshara.bala.18@gmail.com>
 lukkea <github.com/lukkea/>
 David Allison <davidallisongithub@gmail.com>
-David Allison <62114487+david-allison@users.noreply.github.com>
 Tsung-Han Yu <johan456789@gmail.com>
 Piotr Kubowicz <piotr.kubowicz@gmail.com>
 RumovZ <gp5glkw78@relay.firefox.com>
@@ -100,7 +98,7 @@ gnnoh <gerongfenh@gmail.com>
 Sachin Govind <sachin.govind.too@gmail.com>
 Bruce Harris <github.com/bruceharris>
 Patric Cunha <patricc@agap2.pt>
-Brayan Oliveira <69634269+BrayanDSO@users.noreply.github.com>
+Brayan Oliveira <github.com/BrayanDSO>
 Luka Warren <github.com/lukawarren>
 wisherhxl <wisherhxl@gmail.com>
 dobefore <1432338032@qq.com>
@@ -120,7 +118,7 @@ yellowjello <github.com/yellowjello>
 Ingemar Berg <github.com/ingemarberg>
 Ben Kerman <ben@kermanic.org>
 Euan Kemp <euank@euank.com>
 Kieran Black <kieranlblack@gmail.com>
 XeR <github.com/XeR>
 mgrottenthaler <github.com/mgrottenthaler>
 Austin Siew <github.com/Aquafina-water-bottle>
@@ -140,7 +138,7 @@ Monty Evans <montyevans@gmail.com>
 Nil Admirari <https://github.com/nihil-admirari>
 Michael Winkworth <github.com/SteelColossus>
 Mateusz Wojewoda <kawa1.11@o2.pl>
 Jarrett Ye <jarrett.ye@outlook.com>
 Sam Waechter <github.com/swektr>
 Michael Eliachevitch <m.eliachevitch@posteo.de>
 Carlo Quick <https://github.com/CarloQuick>
@@ -150,7 +148,6 @@ user1823 <92206575+user1823@users.noreply.github.com>
 Gustaf Carefall <https://github.com/Gustaf-C>
 virinci <github.com/virinci>
 snowtimeglass <snowtimeglass@gmail.com>
-brishtibheja <136738526+brishtibheja@users.noreply.github.com>
 Ben Olson <github.com/grepgrok>
 Akash Reddy <akashreddy2003@gmail.com>
 Lucio Sauer <watermanpaint@posteo.net>
@@ -165,7 +162,7 @@ Lucas Scharenbroch <lucasscharenbroch@gmail.com>
 Antonio Cavallo <a.cavallo@cavallinux.eu>
 Han Yeong-woo <han@yeongwoo.dev>
 Jean Khawand <jk@jeankhawand.com>
 Pedro Schreiber <schreiber.mmb@gmail.com>
 Foxy_null <https://github.com/Foxy-null>
 Arbyste <arbyste@outlook.com>
 Vasll <github.com/vasll>
@@ -178,71 +175,6 @@ Loudwig <https://github.com/Loudwig>
 Wu Yi-Wei <https://github.com/Ianwu0812>
 RRomeroJr <117.rromero@gmail.com>
 Xidorn Quan <me@upsuper.org>
-Alexander Bocken <alexander@bocken.org>
-James Elmore <email@jameselmore.org>
-Ian Samir Yep Manzano <https://github.com/isym444>
-David Culley <6276049+davidculley@users.noreply.github.com>
-Rastislav Kish <rastislav.kish@protonmail.com>
-jake <jake@sharnoth.com>
-Expertium <https://github.com/Expertium>
-Christian Donat <https://github.com/cdonat2>
-Asuka Minato <https://asukaminato.eu.org>
-Dillon Baldwin <https://github.com/DillBal>
-Voczi <https://github.com/voczi>
-Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
-Themis Demetriades <themis100@outlook.com>
-Luke Bartholomew <lukesbart@icloud.com>
-Gregory Abrasaldo <degeemon@gmail.com>
-Taylor Obyen <162023405+taylorobyen@users.noreply.github.com>
-Kris Cherven <krischerven@gmail.com>
-twwn <github.com/twwn>
-Cy Pokhrel <cy@cy7.sh>
-Park Hyunwoo <phu54321@naver.com>
-Tomas Fabrizio Orsi <torsi@fi.uba.ar>
-Dongjin Ouyang <1113117424@qq.com>
-Sawan Sunar <sawansunar24072002@gmail.com>
-hideo aoyama <https://github.com/boukendesho>
-Ross Brown <rbrownwsws@googlemail.com>
-🦙 <gh@siid.sh>
-Lukas Sommer <sommerluk@gmail.com>
-Luca Auer <lolle2000.la@gmail.com>
-Niclas Heinz <nheinz@hpost.net>
-Omar Kohl <omarkohl@posteo.net>
-David Elizalde <david.elizalde.r.a@gmail.com>
-beyondcompute <beyondcompute@gmail.com>
-Yuki <https://github.com/YukiNagat0>
-wackbyte <wackbyte@protonmail.com>
-GithubAnon0000 <GithubAnon0000@users.noreply.github.com>
-Mike Hardy <github@mikehardy.net>
-Danika_Dakika <https://github.com/Danika-Dakika>
-Mumtaz Hajjo Alrifai <mumtazrifai@protonmail.com>
-Thomas Graves <fate@hey.com>
-Jakub Fidler <jakub.fidler@protonmail.com>
-Valerie Enfys <val@unidentified.systems>
-Julien Chol <https://github.com/chel-ou>
-ikkz <ylei.mk@gmail.com>
-derivativeoflog7 <https://github.com/derivativeoflog7>
-rreemmii-dev <https://github.com/rreemmii-dev>
-babofitos <https://github.com/babofitos>
-Jonathan Schoreels <https://github.com/JSchoreels>
-JL710
-Matt Brubeck <mbrubeck@limpet.net>
-Yaoliang Chen <yaoliang.ch@gmail.com>
-KolbyML <https://github.com/KolbyML>
-Adnane Taghi <dev@soleuniverse.me>
-Spiritual Father <https://github.com/spiritualfather>
-Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
-Sunong2008 <https://github.com/Sunrongguo2008>
-Marvin Kopf <marvinkopf@outlook.com>
-Kevin Nakamura <grinkers@grinkers.net>
-Bradley Szoke <bradleyszoke@gmail.com>
-jcznk <https://github.com/jcznk>
-Thomas Rixen <thomas.rixen@student.uclouvain.be>
-Siyuan Mattuwu Yan <syan4@ualberta.ca>
-Lee Doughty <32392044+leedoughty@users.noreply.github.com>
-memchr <memchr@proton.me>
-Max Romanowski <maxr777@proton.me>
-Aldlss <ayaldlss@gmail.com>
 ********************

Cargo.lock
(file diff suppressed because it is too large)


@@ -1,27 +1,29 @@
 [workspace.package]
 version = "0.0.0"
 authors = ["Ankitects Pty Ltd and contributors <https://help.ankiweb.net>"]
-edition = "2021"
 license = "AGPL-3.0-or-later"
-rust-version = "1.80"
+rust-version = "1.65"
+edition = "2021"
 
 [workspace]
 members = [
+"rslib",
+"rslib/i18n",
+"rslib/linkchecker",
+"rslib/proto",
+"rslib/io",
+"rslib/process",
+"rslib/sync",
+"pylib/rsbridge",
 "build/configure",
 "build/ninja_gen",
 "build/runner",
 "ftl",
-"pylib/rsbridge",
-"qt/launcher",
-"rslib",
-"rslib/i18n",
-"rslib/io",
-"rslib/linkchecker",
-"rslib/process",
-"rslib/proto",
-"rslib/sync",
 "tools/minilints",
+"qt/bundle/win",
+"qt/bundle/mac",
 ]
+exclude = ["qt/bundle"]
 resolver = "2"
 
 [workspace.dependencies.percent-encoding-iri]
@@ -33,8 +35,9 @@ git = "https://github.com/ankitects/linkcheck.git"
 rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
 
 [workspace.dependencies.fsrs]
-version = "5.1.0"
+version = "0.6.4"
 # git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
+# rev = "58ca25ed2bc4bb1dc376208bbcaed7f5a501b941"
 # path = "../open-spaced-repetition/fsrs-rs"
 
 [workspace.dependencies]
@@ -42,8 +45,8 @@ version = "5.1.0"
 anki = { path = "rslib" }
 anki_i18n = { path = "rslib/i18n" }
 anki_io = { path = "rslib/io" }
-anki_process = { path = "rslib/process" }
 anki_proto = { path = "rslib/proto" }
+anki_process = { path = "rslib/process" }
 anki_proto_gen = { path = "rslib/proto_gen" }
 
 ninja_gen = { "path" = "build/ninja_gen" }
@@ -51,101 +54,99 @@ ninja_gen = { "path" = "build/ninja_gen" }
 unicase = "=2.6.0" # any changes could invalidate sqlite indexes
 
 # normal
-ammonia = "4.1.0"
-anyhow = "1.0.98"
-async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
-async-stream = "0.3.6"
-async-trait = "0.1.88"
-axum = { version = "0.8.4", features = ["multipart", "macros"] }
-axum-client-ip = "1.1.3"
-axum-extra = { version = "0.10.1", features = ["typed-header"] }
-bitflags = "2.9.1"
-blake3 = "1.8.2"
-bytes = "1.10.1"
-camino = "1.1.10"
-chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] }
-clap = { version = "4.5.40", features = ["derive"] }
-coarsetime = "0.1.36"
-convert_case = "0.8.0"
-criterion = { version = "0.6.0" }
-csv = "1.3.1"
-data-encoding = "2.9.0"
+ammonia = "3.3.0"
+anyhow = "1.0.80"
+apple-bundles = "0.17.0"
+async-compression = { version = "0.4.6", features = ["zstd", "tokio"] }
+async-stream = "0.3.5"
+async-trait = "0.1.77"
+axum = { version = "0.6.20", features = ["multipart", "macros", "headers"] }
+axum-client-ip = "0.4.2"
+blake3 = "1.5.0"
+bytes = "1.5.0"
+camino = "1.1.6"
+chrono = { version = "0.4.34", default-features = false, features = ["std", "clock"] }
+clap = { version = "4.3.24", features = ["derive"] }
+coarsetime = "0.1.34"
+convert_case = "0.6.0"
+criterion = { version = "0.5.1" }
+csv = "1.3.0"
+data-encoding = "2.5.0"
 difflib = "0.4.0"
-dirs = "6.0.0"
-dunce = "1.0.5"
-embed-resource = "3.0.4"
-envy = "0.4.2"
-flate2 = "1.1.2"
-fluent = "0.17.0"
-fluent-bundle = "0.16.0"
-fluent-syntax = "0.12.0"
+flate2 = "1.0.28"
+fluent = "0.16.0"
+fluent-bundle = "0.15.2"
+fluent-syntax = "0.11.0"
 fnv = "1.0.7"
-futures = "0.3.31"
-globset = "0.4.16"
+futures = "0.3.30"
+glob = "0.3.1"
+globset = "0.4.14"
 hex = "0.4.3"
 htmlescape = "0.3.1"
-hyper = "1"
+hyper = "0.14.28"
 id_tree = "1.8.0"
 inflections = "1.1.1"
-intl-memoizer = "0.5.3"
-itertools = "0.14.0"
-junction = "1.2.0"
-libc = "0.2"
-libc-stdhandle = "0.1"
+intl-memoizer = "0.5.1"
+itertools = "0.12.1"
+junction = "1.0.0"
+lazy_static = "1.4.0"
 maplit = "1.0.2"
-nom = "8.0.0"
+nom = "7.1.3"
 num-format = "0.4.4"
-num_cpus = "1.17.0"
-num_enum = "0.7.3"
-once_cell = "1.21.3"
+num_cpus = "1.16.0"
+num_enum = "0.7.2"
+once_cell = "1.19.0"
 pbkdf2 = { version = "0.12", features = ["simple"] }
-phf = { version = "0.11.3", features = ["macros"] }
-pin-project = "1.1.10"
-prettyplease = "0.2.34"
-prost = "0.13"
-prost-build = "0.13"
-prost-reflect = "0.14.7"
-prost-types = "0.13"
-pulldown-cmark = "0.13.0"
-pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
-rand = "0.9.1"
-rayon = "1.10.0"
-regex = "1.11.1"
-reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
-rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
-rustls-pemfile = "2.2.0"
+phf = { version = "0.11.2", features = ["macros"] }
+pin-project = "1.1.4"
+plist = "1.5.1"
+prettyplease = "0.2.16"
+prost = "0.12.3"
+prost-build = "0.12.3"
+prost-reflect = "0.12.0"
+prost-types = "0.12.3"
+pulldown-cmark = "0.9.6"
+pyo3 = { version = "0.20.3", features = ["extension-module", "abi3", "abi3-py39"] }
+rand = "0.8.5"
+regex = "1.10.3"
+reqwest = { version = "0.11.24", default-features = false, features = ["json", "socks", "stream", "multipart"] }
+rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] }
+rustls-pemfile = "2.1.2"
 scopeguard = "1.2.0"
-serde = { version = "1.0.219", features = ["derive"] }
-serde-aux = "4.7.0"
-serde_json = "1.0.140"
-serde_repr = "0.1.20"
-serde_tuple = "1.1.0"
+serde = { version = "1.0.197", features = ["derive"] }
+serde-aux = "4.5.0"
+serde_json = "1.0.114"
+serde_repr = "0.1.18"
+serde_tuple = "0.5.0"
 sha1 = "0.10.6"
-sha2 = { version = "0.10.9" }
-snafu = { version = "0.8.6", features = ["rust_1_61"] }
-strum = { version = "0.27.1", features = ["derive"] }
-syn = { version = "2.0.103", features = ["parsing", "printing"] }
-tar = "0.4.44"
-tempfile = "3.20.0"
+sha2 = { version = "0.10.8" }
+simple-file-manifest = "0.11.0"
+snafu = { version = "0.8.1", features = ["rust_1_61"] }
+strum = { version = "0.26.1", features = ["derive"] }
+syn = { version = "2.0.51", features = ["parsing", "printing"] }
+tar = "0.4.40"
+tempfile = "3.10.1"
 termcolor = "1.4.1"
-tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] }
-tokio-util = { version = "0.7.15", features = ["io"] }
-tower-http = { version = "0.6.6", features = ["trace"] }
-tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
+tokio = { version = "1.36", features = ["fs", "rt-multi-thread", "macros", "signal"] }
+tokio-util = { version = "0.7.10", features = ["io"] }
+tower-http = { version = "0.4.4", features = ["trace"] }
+tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
 tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
-unic-langid = { version = "0.9.6", features = ["macros"] }
+tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
+tugger-windows-codesign = "0.10.0"
+unic-langid = { version = "0.9.4", features = ["macros"] }
 unic-ucd-category = "0.9.0"
-unicode-normalization = "0.1.24"
-walkdir = "2.5.0"
-which = "8.0.0"
-widestring = "1.1.0"
-winapi = { version = "0.3", features = ["wincon", "winreg"] }
-windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
-wiremock = "0.6.3"
+unicode-normalization = "0.1.23"
+utime = "0.3.1"
+walkdir = "2.4.0"
+which = "5.0.0"
+wiremock = "0.5.22"
 xz2 = "0.1.7"
-zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
-zstd = { version = "0.13.3", features = ["zstdmt"] }
+zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] }
+zstd = { version = "0.13.0", features = ["zstdmt"] }
+envy = "0.4.2"
+dirs = "5.0.1"
+dunce = "1.0.4"
 
 # Apply mild optimizations to our dependencies in dev mode, which among other things
 # improves sha2 performance by about 21x. Opt 1 chosen due to


@@ -6,6 +6,8 @@ The following included source code items use a license other than AGPL3:
 In the pylib folder:
+* The SuperMemo importer: GPL3 and 0BSD.
+* The Pauker importer: BSD-3.
 * statsbg.py: CC BY 4.0.
 In the qt folder:


@@ -1,4 +1,4 @@
-# Anki®
+# Anki
 [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci)


@@ -27,6 +27,7 @@ pub fn build_and_check_aqt(build: &mut Build) -> Result<()> {
 build_forms(build)?;
 build_generated_sources(build)?;
 build_data_folder(build)?;
+build_macos_helper(build)?;
 build_wheel(build)?;
 check_python(build)?;
 Ok(())
@@ -38,6 +39,7 @@ fn build_forms(build: &mut Build) -> Result<()> {
 let mut py_files = vec![];
 for path in ui_files.resolve() {
 let outpath = outdir.join(path.file_name().unwrap()).into_string();
+py_files.push(outpath.replace(".ui", "_qt5.py"));
 py_files.push(outpath.replace(".ui", "_qt6.py"));
 }
 build.add_action(
@@ -123,7 +125,7 @@ fn copy_sveltekit(build: &mut Build) -> Result<()> {
 inputs: inputs![":sveltekit:folder"],
 target_folder: "qt/_aqt/data/web/",
 strip_prefix: "$builddir/",
-extra_args: "-a --delete",
+extra_args: "-a",
 },
 )
 }
@@ -335,25 +337,47 @@ impl BuildAction for BuildThemedIcon<'_> {
 }
 }
+fn build_macos_helper(build: &mut Build) -> Result<()> {
+if cfg!(target_os = "macos") {
+build.add_action(
+"qt:aqt:data:lib:libankihelper",
+RunCommand {
+command: ":pyenv:bin",
+args: "$script $out $in",
+inputs: hashmap! {
+"script" => inputs!["qt/mac/helper_build.py"],
+"in" => inputs![glob!["qt/mac/*.swift"]],
+"" => inputs!["out/env"],
+},
+outputs: hashmap! {
+"out" => vec!["qt/_aqt/data/lib/libankihelper.dylib"],
+},
+},
+)?;
+}
+Ok(())
+}
 fn build_wheel(build: &mut Build) -> Result<()> {
 build.add_action(
 "wheels:aqt",
 BuildWheel {
 name: "aqt",
 version: anki_version(),
+src_folder: "qt/aqt",
+gen_folder: "$builddir/qt/_aqt",
 platform: None,
-deps: inputs![
-":qt:aqt",
-glob!("qt/aqt/**"),
-"qt/pyproject.toml",
-"qt/hatch_build.py"
-],
+deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "python/requirements.aqt.in"],
 },
 )
 }
 fn check_python(build: &mut Build) -> Result<()> {
-python_format(build, "qt", inputs![glob!("qt/**/*.py")])?;
+python_format(
+build,
+"qt",
+inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")],
+)?;
 build.add_action(
 "check:pytest:aqt",


@@ -0,0 +1,505 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use anyhow::Result;
use ninja_gen::action::BuildAction;
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::empty_manifest;
use ninja_gen::archives::with_exe;
use ninja_gen::archives::OnlineArchive;
use ninja_gen::archives::Platform;
use ninja_gen::build::BuildProfile;
use ninja_gen::cargo::CargoBuild;
use ninja_gen::cargo::RustOutput;
use ninja_gen::git::SyncSubmodule;
use ninja_gen::glob;
use ninja_gen::input::BuildInput;
use ninja_gen::inputs;
use ninja_gen::python::PythonEnvironment;
use ninja_gen::Build;
use ninja_gen::Utf8Path;
use crate::anki_version;
use crate::platform::overriden_python_target_platform;
use crate::platform::overriden_rust_target_triple;
#[derive(Debug, PartialEq, Eq)]
enum DistKind {
Standard,
Alternate,
}
impl DistKind {
fn folder_name(&self) -> &'static str {
match self {
DistKind::Standard => "std",
DistKind::Alternate => "alt",
}
}
fn name(&self) -> &'static str {
match self {
DistKind::Standard => "standard",
DistKind::Alternate => "alternate",
}
}
}
pub fn build_bundle(build: &mut Build) -> Result<()> {
// install into venv
setup_primary_venv(build)?;
install_anki_wheels(build)?;
// bundle venv into output binary + extra_files
build_pyoxidizer(build)?;
build_artifacts(build)?;
build_binary(build)?;
// package up outputs with Qt/other deps
download_dist_folder_deps(build)?;
build_dist_folder(build, DistKind::Standard)?;
// repeat for Qt5
if !targetting_macos_arm() {
if !cfg!(target_os = "macos") {
setup_qt5_venv(build)?;
}
build_dist_folder(build, DistKind::Alternate)?;
}
build_packages(build)?;
Ok(())
}
fn targetting_macos_arm() -> bool {
cfg!(all(target_os = "macos", target_arch = "aarch64"))
&& overriden_python_target_platform().is_none()
}
const WIN_AUDIO: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz",
sha256: "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
};
const MAC_ARM_AUDIO: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-arm64.tar.gz",
sha256: "f6c4af9be59ae1c82a16f5c6307f13cbf31b49ad7b69ce1cb6e0e7b403cfdb8f",
};
const MAC_AMD_AUDIO: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-amd64.tar.gz",
sha256: "ecbb3c878805cdd58b1a0b8e3fd8c753b8ce3ad36c8b5904a79111f9db29ff42",
};
const MAC_ARM_QT6: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-arm64.tar.zst",
sha256: "9b2ade4ae9b80506689062845e83e8c60f7fa9843545bf7bb2d11d3e2f105878",
};
const MAC_AMD_QT6: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-amd64.tar.zst",
sha256: "dbd0871e4da22820d1fa9ab29220d631467d1178038dcab4b15169ad7f499b1b",
};
const MAC_AMD_QT5: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/pyqt5.14-mac-amd64.tar.gz",
sha256: "474951bed79ddb9570ee4c5a6079041772551ea77e77171d9e33d6f5e7877ec1",
};
const LINUX_QT_PLUGINS: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-02/qt-plugins-linux-amd64.tar.gz",
sha256: "66bb568aca7242bc55ad419bf5c96755ca15d2a743e1c3a09cba8b83230b138b",
};
const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
};
fn download_dist_folder_deps(build: &mut Build) -> Result<()> {
let mut bundle_deps = vec![":wheels"];
if cfg!(windows) {
download_and_extract(build, "win_amd64_audio", WIN_AUDIO, empty_manifest())?;
download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
bundle_deps.extend([":extract:win_amd64_audio", ":extract:nsis_plugins"]);
} else if cfg!(target_os = "macos") {
if targetting_macos_arm() {
download_and_extract(build, "mac_arm_audio", MAC_ARM_AUDIO, empty_manifest())?;
download_and_extract(build, "mac_arm_qt6", MAC_ARM_QT6, empty_manifest())?;
bundle_deps.extend([":extract:mac_arm_audio", ":extract:mac_arm_qt6"]);
} else {
download_and_extract(build, "mac_amd_audio", MAC_AMD_AUDIO, empty_manifest())?;
download_and_extract(build, "mac_amd_qt6", MAC_AMD_QT6, empty_manifest())?;
download_and_extract(build, "mac_amd_qt5", MAC_AMD_QT5, empty_manifest())?;
bundle_deps.extend([
":extract:mac_amd_audio",
":extract:mac_amd_qt6",
":extract:mac_amd_qt5",
]);
}
} else {
download_and_extract(
build,
"linux_qt_plugins",
LINUX_QT_PLUGINS,
empty_manifest(),
)?;
bundle_deps.extend([":extract:linux_qt_plugins"]);
}
build.add_dependency(
"bundle:deps",
inputs![bundle_deps
.iter()
.map(ToString::to_string)
.collect::<Vec<_>>()],
);
Ok(())
}
struct Venv {
label: &'static str,
path_without_builddir: &'static str,
}
impl Venv {
fn label_as_target(&self, suffix: &str) -> String {
format!(":{}{suffix}", self.label)
}
}
const PRIMARY_VENV: Venv = Venv {
label: "bundle:pyenv",
path_without_builddir: "bundle/pyenv",
};
/// Only used for copying Qt libs on Windows/Linux.
const QT5_VENV: Venv = Venv {
label: "bundle:pyenv-qt5",
path_without_builddir: "bundle/pyenv-qt5",
};
fn setup_primary_venv(build: &mut Build) -> Result<()> {
let mut qt6_reqs = inputs![
"python/requirements.bundle.txt",
if cfg!(windows) {
"python/requirements.qt6_win.txt"
} else if cfg!(target_os = "macos") {
"python/requirements.qt6_mac.txt"
} else {
"python/requirements.qt6_lin.txt"
}
];
if cfg!(windows) {
qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"];
}
build.add_action(
PRIMARY_VENV.label,
PythonEnvironment {
folder: PRIMARY_VENV.path_without_builddir,
base_requirements_txt: "python/requirements.base.txt".into(),
requirements_txt: qt6_reqs,
extra_binary_exports: &[],
},
)?;
Ok(())
}
fn setup_qt5_venv(build: &mut Build) -> Result<()> {
let qt5_reqs = inputs![
"python/requirements.base.txt",
if cfg!(target_os = "macos") {
"python/requirements.qt5_14.txt"
} else {
"python/requirements.qt5_15.txt"
}
];
build.add_action(
QT5_VENV.label,
PythonEnvironment {
folder: QT5_VENV.path_without_builddir,
base_requirements_txt: "python/requirements.base.txt".into(),
requirements_txt: qt5_reqs,
extra_binary_exports: &[],
},
)
}
struct InstallAnkiWheels {
venv: Venv,
}
impl BuildAction for InstallAnkiWheels {
fn command(&self) -> &str {
"$pip install --force-reinstall --no-deps $in"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
build.add_inputs("pip", inputs![self.venv.label_as_target(":pip")]);
build.add_inputs("in", inputs![":wheels"]);
build.add_output_stamp("bundle/wheels.stamp");
}
}
fn install_anki_wheels(build: &mut Build) -> Result<()> {
build.add_action(
"bundle:add_wheels:qt6",
InstallAnkiWheels { venv: PRIMARY_VENV },
)?;
Ok(())
}
fn build_pyoxidizer(build: &mut Build) -> Result<()> {
let offline_build = env::var("OFFLINE_BUILD").is_ok();
build.add_action(
"bundle:pyoxidizer:repo",
SyncSubmodule {
path: "qt/bundle/PyOxidizer",
offline_build,
},
)?;
build.add_action(
"bundle:pyoxidizer:bin",
CargoBuild {
inputs: inputs![":bundle:pyoxidizer:repo", glob!["qt/bundle/PyOxidizer/**"]],
// can't use ::Binary() here, as we're in a separate workspace
outputs: &[RustOutput::Data(
"bin",
&with_exe("bundle/rust/release/pyoxidizer"),
)],
target: None,
extra_args: &format!(
"--manifest-path={} --target-dir={} -p pyoxidizer",
"qt/bundle/PyOxidizer/Cargo.toml", "$builddir/bundle/rust"
),
release_override: Some(BuildProfile::Release),
},
)?;
Ok(())
}
struct BuildArtifacts {}
impl BuildAction for BuildArtifacts {
fn command(&self) -> &str {
"$runner build-artifacts $bundle_root $pyoxidizer_bin"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
build.add_inputs("pyoxidizer_bin", inputs![":bundle:pyoxidizer:bin"]);
build.add_inputs("", inputs![PRIMARY_VENV.label_as_target("")]);
build.add_inputs("", inputs![":bundle:add_wheels:qt6", glob!["qt/bundle/**"]]);
build.add_variable("bundle_root", "$builddir/bundle");
build.add_outputs_ext(
"pyo3_config",
vec!["bundle/artifacts/pyo3-build-config-file.txt"],
true,
);
}
fn check_output_timestamps(&self) -> bool {
true
}
}
fn build_artifacts(build: &mut Build) -> Result<()> {
build.add_action("bundle:artifacts", BuildArtifacts {})
}
struct BuildBundle {}
impl BuildAction for BuildBundle {
fn command(&self) -> &str {
"$runner build-bundle-binary"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
build.add_inputs("", inputs![":bundle:artifacts", glob!["qt/bundle/**"]]);
build.add_outputs(
"",
vec![RustOutput::Binary("anki").path(
Utf8Path::new("$builddir/bundle/rust"),
Some(
overriden_rust_target_triple()
.unwrap_or_else(|| Platform::current().as_rust_triple()),
),
// our pyoxidizer bin uses lto on the release profile
BuildProfile::Release,
)],
);
}
}
fn build_binary(build: &mut Build) -> Result<()> {
build.add_action("bundle:binary", BuildBundle {})
}
struct BuildDistFolder {
kind: DistKind,
deps: BuildInput,
}
impl BuildAction for BuildDistFolder {
fn command(&self) -> &str {
"$runner build-dist-folder $kind $out_folder "
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
build.add_inputs("", &self.deps);
build.add_variable("kind", self.kind.name());
let folder = match self.kind {
DistKind::Standard => "bundle/std",
DistKind::Alternate => "bundle/alt",
};
build.add_outputs("out_folder", vec![folder]);
build.add_outputs("stamp", vec![format!("{folder}.stamp")]);
}
fn check_output_timestamps(&self) -> bool {
true
}
}
fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> {
let mut deps = inputs![":bundle:deps", ":bundle:binary", glob!["qt/bundle/**"]];
if kind == DistKind::Alternate && !cfg!(target_os = "macos") {
deps = inputs![deps, QT5_VENV.label_as_target("")];
}
let group = match kind {
DistKind::Standard => "bundle:folder:std",
DistKind::Alternate => "bundle:folder:alt",
};
build.add_action(group, BuildDistFolder { kind, deps })
}
fn build_packages(build: &mut Build) -> Result<()> {
if cfg!(windows) {
build_windows_installers(build)
} else if cfg!(target_os = "macos") {
build_mac_app(build, DistKind::Standard)?;
if !targetting_macos_arm() {
build_mac_app(build, DistKind::Alternate)?;
}
build_dmgs(build)
} else {
build_tarball(build, DistKind::Standard)?;
build_tarball(build, DistKind::Alternate)
}
}
struct BuildTarball {
kind: DistKind,
}
impl BuildAction for BuildTarball {
fn command(&self) -> &str {
"chmod -R a+r $folder && tar -I '$zstd' --transform $transform -cf $tarball -C $folder ."
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
let input_folder_name = self.kind.folder_name();
let input_folder_target = format!(":bundle:folder:{input_folder_name}");
let input_folder_path = format!("$builddir/bundle/{input_folder_name}");
let version = anki_version();
let qt = match self.kind {
DistKind::Standard => "qt6",
DistKind::Alternate => "qt5",
};
let output_folder_base = format!("anki-{version}-linux-{qt}");
let output_tarball = format!("bundle/package/{output_folder_base}.tar.zst");
build.add_inputs("", inputs![input_folder_target]);
build.add_variable("zstd", "zstd -c --long -T0 -18");
build.add_variable("transform", format!("s%^.%{output_folder_base}%S"));
build.add_variable("folder", input_folder_path);
build.add_outputs("tarball", vec![output_tarball]);
}
}
fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> {
let name = kind.folder_name();
build.add_action(format!("bundle:package:{name}"), BuildTarball { kind })
}
struct BuildWindowsInstallers {}
impl BuildAction for BuildWindowsInstallers {
fn command(&self) -> &str {
"cargo run -p makeexe --target-dir=out/rust -- $version $src_root $bundle_root $out"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
let version = anki_version();
let outputs = ["qt6", "qt5"].iter().map(|qt| {
let output_base = format!("anki-{version}-windows-{qt}");
format!("bundle/package/{output_base}.exe")
});
build.add_inputs("", inputs![":bundle:folder:std", ":bundle:folder:alt"]);
build.add_variable("version", &version);
build.add_variable("bundle_root", "$builddir/bundle");
build.add_outputs("out", outputs);
}
}
fn build_windows_installers(build: &mut Build) -> Result<()> {
build.add_action("bundle:package", BuildWindowsInstallers {})
}
struct BuildMacApp {
kind: DistKind,
}
impl BuildAction for BuildMacApp {
fn command(&self) -> &str {
"cargo run -p makeapp --target-dir=out/rust -- build-app $version $kind $stamp"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
let folder_name = self.kind.folder_name();
build.add_inputs("", inputs![format!(":bundle:folder:{folder_name}")]);
build.add_variable("version", anki_version());
build.add_variable("kind", self.kind.name());
build.add_outputs("stamp", vec![format!("bundle/app/{folder_name}.stamp")]);
}
}
fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> {
build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind })
}
struct BuildDmgs {}
impl BuildAction for BuildDmgs {
fn command(&self) -> &str {
"cargo run -p makeapp --target-dir=out/rust -- build-dmgs $dmgs"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
let version = anki_version();
let platform = if targetting_macos_arm() {
"apple"
} else {
"intel"
};
let qt = if targetting_macos_arm() {
&["qt6"][..]
} else {
&["qt6", "qt5"]
};
let dmgs = qt
.iter()
.map(|qt| format!("bundle/dmg/anki-{version}-mac-{platform}-{qt}.dmg"));
build.add_inputs("", inputs![":bundle:app"]);
build.add_outputs("dmgs", dmgs);
}
}
fn build_dmgs(build: &mut Build) -> Result<()> {
build.add_action("bundle:dmg", BuildDmgs {})
}
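For reference, a minimal sketch of how the BuildTarball command above expands for one kind. The version "25.06" and the $folder/$zstd placeholders are illustrative; the real values come from ninja variables. The --transform expression is a sed-style rule that rewrites the leading `.` of each archived path to the versioned folder name, so the tarball unpacks into a single anki-<version>-linux-<qt> directory (the trailing S flag exempts symlink targets, per GNU tar's transform flags).

// Sketch only: version and paths are illustrative.
fn tar_command(version: &str, qt: &str) -> String {
    let base = format!("anki-{version}-linux-{qt}");
    format!(
        "chmod -R a+r $folder && tar -I 'zstd -c --long -T0 -18' \
         --transform 's%^.%{base}%S' -cf bundle/package/{base}.tar.zst -C $folder ."
    )
}

fn main() {
    println!("{}", tar_command("25.06", "qt6"));
}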

View file

@ -1,44 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anyhow::Result;
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::empty_manifest;
use ninja_gen::archives::OnlineArchive;
use ninja_gen::command::RunCommand;
use ninja_gen::hashmap;
use ninja_gen::inputs;
use ninja_gen::Build;
pub fn setup_uv_universal(build: &mut Build) -> Result<()> {
if !cfg!(target_arch = "aarch64") {
return Ok(());
}
build.add_action(
"launcher:uv_universal",
RunCommand {
command: "/usr/bin/lipo",
args: "-create -output $out $arm_bin $x86_bin",
inputs: hashmap! {
"arm_bin" => inputs![":extract:uv:bin"],
"x86_bin" => inputs![":extract:uv_mac_x86:bin"],
},
outputs: hashmap! {
"out" => vec!["launcher/uv"],
},
},
)
}
pub fn build_launcher(build: &mut Build) -> Result<()> {
setup_uv_universal(build)?;
download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
Ok(())
}
const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
};
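As a sketch, the lipo rule registered in setup_uv_universal above amounts to the following direct process call; the out/... paths are assumptions standing in for the ninja-wired inputs and output.

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Merge the two single-arch uv binaries into one universal binary.
    let status = Command::new("/usr/bin/lipo")
        .args([
            "-create",
            "-output",
            "out/launcher/uv",           // merged universal binary
            "out/extract/uv/uv",         // arm64 build (assumed path)
            "out/extract/uv_mac_x86/uv", // x86_64 build (assumed path)
        ])
        .status()?;
    assert!(status.success(), "lipo failed");
    Ok(())
}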

View file

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod aqt; mod aqt;
mod launcher; mod bundle;
mod platform; mod platform;
mod pylib; mod pylib;
mod python; mod python;
@ -13,14 +13,13 @@ use std::env;
use anyhow::Result; use anyhow::Result;
use aqt::build_and_check_aqt; use aqt::build_and_check_aqt;
use launcher::build_launcher; use bundle::build_bundle;
use ninja_gen::glob; use ninja_gen::glob;
use ninja_gen::inputs; use ninja_gen::inputs;
use ninja_gen::protobuf::check_proto; use ninja_gen::protobuf::check_proto;
use ninja_gen::protobuf::setup_protoc; use ninja_gen::protobuf::setup_protoc;
use ninja_gen::python::setup_uv; use ninja_gen::python::setup_python;
use ninja_gen::Build; use ninja_gen::Build;
use platform::overriden_python_venv_platform;
use pylib::build_pylib; use pylib::build_pylib;
use pylib::check_pylib; use pylib::check_pylib;
use python::check_python; use python::check_python;
@ -48,10 +47,7 @@ fn main() -> Result<()> {
check_proto(build, inputs![glob!["proto/**/*.proto"]])?; check_proto(build, inputs![glob!["proto/**/*.proto"]])?;
if env::var("OFFLINE_BUILD").is_err() { if env::var("OFFLINE_BUILD").is_err() {
setup_uv( setup_python(build)?;
build,
overriden_python_venv_platform().unwrap_or(build.host_platform),
)?;
} }
setup_venv(build)?; setup_venv(build)?;
@ -61,7 +57,7 @@ fn main() -> Result<()> {
build_and_check_aqt(build)?; build_and_check_aqt(build)?;
if env::var("OFFLINE_BUILD").is_err() { if env::var("OFFLINE_BUILD").is_err() {
build_launcher(build)?; build_bundle(build)?;
} }
setup_sphinx(build)?; setup_sphinx(build)?;
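A minimal sketch of the OFFLINE_BUILD gate used in main() above: network-dependent actions are registered only when the variable is unset.

fn main() {
    // Steps that download (uv/launcher setup above) are skipped entirely
    // when OFFLINE_BUILD is set; everything else registers as usual.
    if std::env::var("OFFLINE_BUILD").is_err() {
        println!("registering network-dependent actions");
    }
    println!("registering offline-safe actions");
}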

View file

@ -5,30 +5,18 @@ use std::env;
use ninja_gen::archives::Platform; use ninja_gen::archives::Platform;
/// Please see [`overriden_python_target_platform()`] for details. /// Usually None to use the host architecture; can be overriden by setting
/// MAC_X86 to build for x86_64 on Apple Silicon
pub fn overriden_rust_target_triple() -> Option<&'static str> { pub fn overriden_rust_target_triple() -> Option<&'static str> {
overriden_python_wheel_platform().map(|p| p.as_rust_triple()) overriden_python_target_platform().map(|p| p.as_rust_triple())
} }
/// Usually None to use the host architecture, except on Windows which /// Usually None to use the host architecture; can be overriden by setting
/// always uses x86_64, since WebEngine is unavailable for ARM64. /// MAC_X86 to build for x86_64 on Apple Silicon
pub fn overriden_python_venv_platform() -> Option<Platform> { pub fn overriden_python_target_platform() -> Option<Platform> {
if cfg!(target_os = "windows") { if env::var("MAC_X86").is_ok() {
Some(Platform::WindowsX64) Some(Platform::MacX64)
} else { } else {
None None
} }
} }
/// Like [`overriden_python_venv_platform`], but:
/// If MAC_X86 is set, an X86 wheel will be built on macOS ARM.
/// If LIN_ARM64 is set, an ARM64 wheel will be built on Linux AMD64.
pub fn overriden_python_wheel_platform() -> Option<Platform> {
if env::var("MAC_X86").is_ok() {
Some(Platform::MacX64)
} else if env::var("LIN_ARM64").is_ok() {
Some(Platform::LinuxArm)
} else {
overriden_python_venv_platform()
}
}
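A self-contained sketch of how these overrides select a Rust target triple; the two-variant Platform enum and as_rust_triple stand in for the ninja_gen types.

enum Platform {
    MacX64,
    LinuxArm,
}

impl Platform {
    fn as_rust_triple(self) -> &'static str {
        match self {
            Platform::MacX64 => "x86_64-apple-darwin",
            Platform::LinuxArm => "aarch64-unknown-linux-gnu",
        }
    }
}

// Mirrors overriden_python_wheel_platform() above: env vars force a
// cross-target wheel; otherwise the host platform is used.
fn overridden_wheel_platform() -> Option<Platform> {
    if std::env::var("MAC_X86").is_ok() {
        Some(Platform::MacX64)
    } else if std::env::var("LIN_ARM64").is_ok() {
        Some(Platform::LinuxArm)
    } else {
        None
    }
}

fn main() {
    // Prints None unless MAC_X86 or LIN_ARM64 is set in the environment.
    println!("{:?}", overridden_wheel_platform().map(Platform::as_rust_triple));
}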

View file

@ -14,7 +14,7 @@ use ninja_gen::python::PythonTest;
use ninja_gen::Build; use ninja_gen::Build;
use crate::anki_version; use crate::anki_version;
use crate::platform::overriden_python_wheel_platform; use crate::platform::overriden_python_target_platform;
use crate::python::BuildWheel; use crate::python::BuildWheel;
use crate::python::GenPythonProto; use crate::python::GenPythonProto;
@ -50,7 +50,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
output: &format!( output: &format!(
"pylib/anki/_rsbridge.{}", "pylib/anki/_rsbridge.{}",
match build.host_platform { match build.host_platform {
Platform::WindowsX64 | Platform::WindowsArm => "pyd", Platform::WindowsX64 => "pyd",
_ => "so", _ => "so",
} }
), ),
@ -64,12 +64,13 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
BuildWheel { BuildWheel {
name: "anki", name: "anki",
version: anki_version(), version: anki_version(),
platform: overriden_python_wheel_platform().or(Some(build.host_platform)), src_folder: "pylib/anki",
gen_folder: "$builddir/pylib/anki",
platform: overriden_python_target_platform().or(Some(build.host_platform)),
deps: inputs![ deps: inputs![
":pylib:anki", ":pylib:anki",
glob!("pylib/anki/**"), glob!("pylib/anki/**"),
"pylib/pyproject.toml", "python/requirements.anki.in",
"pylib/hatch_build.py"
], ],
}, },
)?; )?;
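A sketch of the extension-module suffix selection used by build_pylib above; the bool parameter stands in for matching on build.host_platform.

fn rsbridge_output(windows: bool) -> String {
    // Windows extension modules use .pyd; everything else uses .so.
    format!("pylib/anki/_rsbridge.{}", if windows { "pyd" } else { "so" })
}

fn main() {
    assert_eq!(rsbridge_output(true), "pylib/anki/_rsbridge.pyd");
    assert_eq!(rsbridge_output(false), "pylib/anki/_rsbridge.so");
}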

View file

@ -1,73 +1,82 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use anyhow::Result; use anyhow::Result;
use ninja_gen::action::BuildAction; use ninja_gen::action::BuildAction;
use ninja_gen::archives::Platform; use ninja_gen::archives::Platform;
use ninja_gen::build::FilesHandle; use ninja_gen::build::FilesHandle;
use ninja_gen::command::RunCommand;
use ninja_gen::copy::CopyFiles; use ninja_gen::copy::CopyFiles;
use ninja_gen::glob; use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::input::BuildInput; use ninja_gen::input::BuildInput;
use ninja_gen::inputs; use ninja_gen::inputs;
use ninja_gen::python::python_format; use ninja_gen::python::python_format;
use ninja_gen::python::PythonEnvironment; use ninja_gen::python::PythonEnvironment;
use ninja_gen::python::PythonLint;
use ninja_gen::python::PythonTypecheck; use ninja_gen::python::PythonTypecheck;
use ninja_gen::python::RuffCheck; use ninja_gen::rsync::RsyncFiles;
use ninja_gen::Build; use ninja_gen::Build;
/// Normalize version string by removing leading zeros from numeric parts
/// while preserving pre-release markers (b1, rc2, a3, etc.)
fn normalize_version(version: &str) -> String {
version
.split('.')
.map(|part| {
// Check if the part contains only digits
if part.chars().all(|c| c.is_ascii_digit()) {
// Numeric part: remove leading zeros
part.parse::<u32>().unwrap_or(0).to_string()
} else {
// Mixed part (contains both numbers and pre-release markers)
// Split on first non-digit character and normalize the numeric prefix
let chars = part.chars();
let mut numeric_prefix = String::new();
let mut rest = String::new();
let mut found_non_digit = false;
for ch in chars {
if ch.is_ascii_digit() && !found_non_digit {
numeric_prefix.push(ch);
} else {
found_non_digit = true;
rest.push(ch);
}
}
if numeric_prefix.is_empty() {
part.to_string()
} else {
let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
format!("{normalized_prefix}{rest}")
}
}
})
.collect::<Vec<_>>()
.join(".")
}
pub fn setup_venv(build: &mut Build) -> Result<()> { pub fn setup_venv(build: &mut Build) -> Result<()> {
let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"]; let platform_deps = if cfg!(windows) {
inputs![
"python/requirements.qt6_win.txt",
"python/requirements.win.txt",
]
} else if cfg!(target_os = "macos") {
inputs!["python/requirements.qt6_mac.txt",]
} else if cfg!(all(target_os = "linux", target_arch = "aarch64")) {
// system-provided Qt on ARM64
inputs![]
} else {
// normal linux
inputs!["python/requirements.qt6_lin.txt"]
};
let requirements_txt = inputs!["python/requirements.dev.txt", platform_deps];
build.add_action( build.add_action(
"pyenv", "pyenv",
PythonEnvironment { PythonEnvironment {
venv_folder: "pyenv", folder: "pyenv",
deps: inputs![ base_requirements_txt: inputs!["python/requirements.base.txt"],
"pyproject.toml", requirements_txt,
"pylib/pyproject.toml", extra_binary_exports: &[
"qt/pyproject.toml", "pip-compile",
"uv.lock" "pip-sync",
"mypy",
"black", // Required for offline build
"isort",
"pylint",
"pytest",
"protoc-gen-mypy", // ditto
], ],
extra_args: "--all-packages --extra qt --extra audio", },
extra_binary_exports, )?;
// optional venvs for testing with Qt5
let mut reqs_qt5 = inputs!["python/requirements.bundle.txt"];
if cfg!(windows) {
reqs_qt5 = inputs![reqs_qt5, "python/requirements.win.txt"];
}
build.add_action(
"pyenv-qt5.15",
PythonEnvironment {
folder: "pyenv-qt5.15",
base_requirements_txt: inputs!["python/requirements.base.txt"],
requirements_txt: inputs![&reqs_qt5, "python/requirements.qt5_15.txt"],
extra_binary_exports: &[],
},
)?;
build.add_action(
"pyenv-qt5.14",
PythonEnvironment {
folder: "pyenv-qt5.14",
base_requirements_txt: inputs!["python/requirements.base.txt"],
requirements_txt: inputs![reqs_qt5, "python/requirements.qt5_14.txt"],
extra_binary_exports: &[],
}, },
)?; )?;
@ -105,7 +114,7 @@ impl BuildAction for GenPythonProto {
build.add_outputs("", python_outputs); build.add_outputs("", python_outputs);
} }
fn hide_progress(&self) -> bool { fn hide_last_line(&self) -> bool {
true true
} }
} }
@ -113,66 +122,45 @@ impl BuildAction for GenPythonProto {
pub struct BuildWheel { pub struct BuildWheel {
pub name: &'static str, pub name: &'static str,
pub version: String, pub version: String,
pub src_folder: &'static str,
pub gen_folder: &'static str,
pub platform: Option<Platform>, pub platform: Option<Platform>,
pub deps: BuildInput, pub deps: BuildInput,
} }
impl BuildAction for BuildWheel { impl BuildAction for BuildWheel {
fn command(&self) -> &str { fn command(&self) -> &str {
"$uv build --wheel --out-dir=$out_dir --project=$project_dir" "$pyenv_bin $script $src $gen $out"
} }
fn files(&mut self, build: &mut impl FilesHandle) { fn files(&mut self, build: &mut impl FilesHandle) {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]);
let uv_path = build.add_inputs("script", inputs!["python/write_wheel.py"]);
std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
build.add_inputs("uv", inputs![uv_path]);
} else {
build.add_inputs("uv", inputs![":uv_binary"]);
}
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
build.add_variable("src", self.src_folder);
build.add_variable("gen", self.gen_folder);
// Set the project directory based on which package we're building
let project_dir = if self.name == "anki" { "pylib" } else { "qt" };
build.add_variable("project_dir", project_dir);
// Set environment variable for uv to use our pyenv
build.add_variable("pyenv_path", "$builddir/pyenv");
build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
// Set output directory
build.add_variable("out_dir", "$builddir/wheels/");
// Calculate the wheel filename that uv will generate
let tag = if let Some(platform) = self.platform { let tag = if let Some(platform) = self.platform {
let platform_tag = match platform { let platform = match platform {
Platform::LinuxX64 => "manylinux_2_36_x86_64", Platform::LinuxX64 => "manylinux_2_28_x86_64",
Platform::LinuxArm => "manylinux_2_36_aarch64", Platform::LinuxArm => "manylinux_2_31_aarch64",
Platform::MacX64 => "macosx_12_0_x86_64", Platform::MacX64 => "macosx_10_13_x86_64",
Platform::MacArm => "macosx_12_0_arm64", Platform::MacArm => "macosx_11_0_arm64",
Platform::WindowsX64 => "win_amd64", Platform::WindowsX64 => "win_amd64",
Platform::WindowsArm => "win_arm64",
}; };
format!("cp39-abi3-{platform_tag}") format!("cp39-abi3-{platform}")
} else { } else {
"py3-none-any".into() "py3-none-any".into()
}; };
// Set environment variable for hatch_build.py to use the correct platform tag
build.add_variable("wheel_tag", &tag);
build.add_env_var("ANKI_WHEEL_TAG", "$wheel_tag");
let name = self.name; let name = self.name;
let version = &self.version;
let normalized_version = normalize_version(&self.version); let wheel_path = format!("wheels/{name}-{version}-{tag}.whl");
let wheel_path = format!("wheels/{name}-{normalized_version}-{tag}.whl");
build.add_outputs("out", vec![wheel_path]); build.add_outputs("out", vec![wheel_path]);
} }
} }
pub fn check_python(build: &mut Build) -> Result<()> { pub fn check_python(build: &mut Build) -> Result<()> {
python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?;
python_format(build, "tools", inputs![glob!("tools/**/*.py")])?; python_format(build, "tools", inputs![glob!("tools/**/*.py")])?;
build.add_action( build.add_action(
@ -184,6 +172,7 @@ pub fn check_python(build: &mut Build) -> Result<()> {
"qt/tools", "qt/tools",
"out/pylib/anki", "out/pylib/anki",
"out/qt/_aqt", "out/qt/_aqt",
"ftl",
"python", "python",
"tools", "tools",
], ],
@ -195,26 +184,60 @@ pub fn check_python(build: &mut Build) -> Result<()> {
}, },
)?; )?;
let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"]; add_pylint(build)?;
let ruff_deps = inputs![
glob!["{pylib,ftl,qt,python,tools}/**/*.py"], Ok(())
":pylib:anki", }
":qt:aqt"
]; fn add_pylint(build: &mut Build) -> Result<()> {
// pylint does not support PEP420 implicit namespaces split across import paths,
// so we need to merge our pylib sources and generated files before invoking it,
// and add a top-level __init__.py
build.add_action( build.add_action(
"check:ruff", "check:pylint:copy_pylib",
RuffCheck { RsyncFiles {
folders: ruff_folders, inputs: inputs![":pylib:anki"],
deps: ruff_deps.clone(), target_folder: "pylint/anki",
check_only: true, strip_prefix: "$builddir/pylib/anki",
// avoid copying our large rsbridge binary
extra_args: "--links",
}, },
)?; )?;
build.add_action( build.add_action(
"fix:ruff", "check:pylint:copy_pylib",
RuffCheck { RsyncFiles {
folders: ruff_folders, inputs: inputs![glob!["pylib/anki/**"]],
deps: ruff_deps, target_folder: "pylint/anki",
check_only: false, strip_prefix: "pylib/anki",
extra_args: "",
},
)?;
build.add_action(
"check:pylint:copy_pylib",
RunCommand {
command: ":pyenv:bin",
args: "$script $out",
inputs: hashmap! { "script" => inputs!["python/mkempty.py"] },
outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] },
},
)?;
build.add_action(
"check:pylint",
PythonLint {
folders: &[
"$builddir/pylint/anki",
"qt/aqt",
"ftl",
"pylib/tools",
"tools",
"python",
],
pylint_ini: inputs![".pylintrc"],
deps: inputs![
":check:pylint:copy_pylib",
":qt:aqt",
glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py")
],
}, },
)?; )?;
@ -227,23 +250,17 @@ struct Sphinx {
impl BuildAction for Sphinx { impl BuildAction for Sphinx {
fn command(&self) -> &str { fn command(&self) -> &str {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { if env::var("OFFLINE_BUILD").is_err() {
"$python python/sphinx/build.py" "$pip install sphinx sphinx_rtd_theme sphinx-autoapi \
&& $python python/sphinx/build.py"
} else { } else {
"$uv sync --extra sphinx && $python python/sphinx/build.py" "$python python/sphinx/build.py"
} }
} }
fn files(&mut self, build: &mut impl FilesHandle) { fn files(&mut self, build: &mut impl FilesHandle) {
if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") { if env::var("OFFLINE_BUILD").is_err() {
let uv_path = build.add_inputs("pip", inputs![":pyenv:pip"]);
std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
build.add_inputs("uv", inputs![uv_path]);
} else {
build.add_inputs("uv", inputs![":uv_binary"]);
// Set environment variable to use the existing pyenv
build.add_variable("pyenv_path", "$builddir/pyenv");
build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
} }
build.add_inputs("python", inputs![":pyenv:bin"]); build.add_inputs("python", inputs![":pyenv:bin"]);
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
@ -266,35 +283,8 @@ pub(crate) fn setup_sphinx(build: &mut Build) -> Result<()> {
build.add_action( build.add_action(
"python:sphinx", "python:sphinx",
Sphinx { Sphinx {
deps: inputs![ deps: inputs![":pylib", ":qt", ":python:sphinx:copy_conf"],
":pylib",
":qt",
":python:sphinx:copy_conf",
"pyproject.toml"
],
}, },
)?; )?;
Ok(()) Ok(())
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_normalize_version_basic() {
assert_eq!(normalize_version("1.2.3"), "1.2.3");
assert_eq!(normalize_version("01.02.03"), "1.2.3");
assert_eq!(normalize_version("1.0.0"), "1.0.0");
}
#[test]
fn test_normalize_version_with_prerelease() {
assert_eq!(normalize_version("1.2.3b1"), "1.2.3b1");
assert_eq!(normalize_version("01.02.03b1"), "1.2.3b1");
assert_eq!(normalize_version("1.0.0rc2"), "1.0.0rc2");
assert_eq!(normalize_version("2.1.0a3"), "2.1.0a3");
assert_eq!(normalize_version("1.2.3beta1"), "1.2.3beta1");
assert_eq!(normalize_version("1.2.3alpha1"), "1.2.3alpha1");
}
}
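To make the wheel naming concrete, a small sketch composing the filename BuildWheel::files registers as its output; the version is made up, and the tag matches the manylinux value shown above.

fn wheel_filename(name: &str, version: &str, tag: &str) -> String {
    format!("{name}-{version}-{tag}.whl")
}

fn main() {
    // normalize_version("02.1.03b1") == "2.1.3b1" per the function above.
    let path = wheel_filename("anki", "2.1.3b1", "cp39-abi3-manylinux_2_36_x86_64");
    assert_eq!(path, "anki-2.1.3b1-cp39-abi3-manylinux_2_36_x86_64.whl");
}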

View file

@ -154,7 +154,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
"$builddir/buildhash", "$builddir/buildhash",
// building on Windows requires python3.lib // building on Windows requires python3.lib
if cfg!(windows) { if cfg!(windows) {
inputs![":pyenv:bin"] inputs![":extract:python"]
} else { } else {
inputs![] inputs![]
} }
@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
pub fn check_rust(build: &mut Build) -> Result<()> { pub fn check_rust(build: &mut Build) -> Result<()> {
let inputs = inputs![ let inputs = inputs![
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"), glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"),
"Cargo.lock", "Cargo.lock",
"Cargo.toml", "Cargo.toml",
"rust-toolchain.toml", "rust-toolchain.toml",
@ -246,8 +246,8 @@ pub fn check_minilints(build: &mut Build) -> Result<()> {
let files = inputs![ let files = inputs![
glob![ glob![
"**/*.{py,rs,ts,svelte,mjs,md}", "**/*.{py,rs,ts,svelte,mjs}",
"{node_modules,ts/.svelte-kit}/**" "{node_modules,qt/bundle/PyOxidizer,ts/.svelte-kit}/**"
], ],
"Cargo.lock" "Cargo.lock"
]; ];

View file

@ -14,7 +14,6 @@ use ninja_gen::node::DPrint;
use ninja_gen::node::EsbuildScript; use ninja_gen::node::EsbuildScript;
use ninja_gen::node::Eslint; use ninja_gen::node::Eslint;
use ninja_gen::node::GenTypescriptProto; use ninja_gen::node::GenTypescriptProto;
use ninja_gen::node::Prettier;
use ninja_gen::node::SqlFormat; use ninja_gen::node::SqlFormat;
use ninja_gen::node::SvelteCheck; use ninja_gen::node::SvelteCheck;
use ninja_gen::node::SveltekitBuild; use ninja_gen::node::SveltekitBuild;
@ -65,7 +64,6 @@ fn setup_node(build: &mut Build) -> Result<()> {
"vite", "vite",
"vitest", "vitest",
"protoc-gen-es", "protoc-gen-es",
"prettier",
], ],
hashmap! { hashmap! {
"jquery" => vec![ "jquery" => vec![
@ -184,16 +182,11 @@ fn build_and_check_pages(build: &mut Build) -> Result<()> {
let group = format!("ts:{name}"); let group = format!("ts:{name}");
let deps = inputs![deps, glob!(format!("ts/{name}/**"))]; let deps = inputs![deps, glob!(format!("ts/{name}/**"))];
let extra_exts = if html { &["css", "html"][..] } else { &["css"] }; let extra_exts = if html { &["css", "html"][..] } else { &["css"] };
let entrypoint = if html {
format!("ts/routes/{name}/index.ts")
} else {
format!("ts/{name}/index.ts")
};
build.add_action( build.add_action(
&group, &group,
EsbuildScript { EsbuildScript {
script: inputs!["ts/bundle_svelte.mjs"], script: inputs!["ts/bundle_svelte.mjs"],
entrypoint: inputs![entrypoint], entrypoint: inputs![format!("ts/{name}/index.ts")],
output_stem: &format!("ts/{name}/{name}"), output_stem: &format!("ts/{name}/{name}"),
deps: deps.clone(), deps: deps.clone(),
extra_exts, extra_exts,
@ -217,17 +210,6 @@ fn build_and_check_pages(build: &mut Build) -> Result<()> {
":sveltekit", ":sveltekit",
], ],
)?; )?;
build_page(
"congrats",
true,
inputs![
//
":ts:lib",
":ts:components",
":sass",
":sveltekit"
],
)?;
Ok(()) Ok(())
} }
@ -308,10 +290,10 @@ fn build_and_check_reviewer(build: &mut Build) -> Result<()> {
} }
fn check_web(build: &mut Build) -> Result<()> { fn check_web(build: &mut Build) -> Result<()> {
let fmt_excluded = "{target,ts/.svelte-kit,node_modules}/**"; let dprint_files = inputs![glob![
let dprint_files = inputs![glob!["**/*.{ts,mjs,js,md,json,toml,scss}", fmt_excluded]]; "**/*.{ts,mjs,js,md,json,toml,svelte,scss}",
let prettier_files = inputs![glob!["**/*.svelte", fmt_excluded]]; "{target,ts/.svelte-kit,node_modules}/**"
]];
build.add_action( build.add_action(
"check:format:dprint", "check:format:dprint",
DPrint { DPrint {
@ -326,24 +308,11 @@ fn check_web(build: &mut Build) -> Result<()> {
check_only: false, check_only: false,
}, },
)?; )?;
build.add_action(
"check:format:prettier",
Prettier {
inputs: prettier_files.clone(),
check_only: true,
},
)?;
build.add_action(
"format:prettier",
Prettier {
inputs: prettier_files,
check_only: false,
},
)?;
build.add_action( build.add_action(
"check:vitest", "check:vitest",
ViteTest { ViteTest {
deps: inputs![ deps: inputs![
"yarn",
":node_modules", ":node_modules",
":ts:generated", ":ts:generated",
glob!["ts/{svelte.config.js,vite.config.ts,tsconfig.json}"], glob!["ts/{svelte.config.js,vite.config.ts,tsconfig.json}"],
@ -356,6 +325,7 @@ fn check_web(build: &mut Build) -> Result<()> {
SvelteCheck { SvelteCheck {
tsconfig: inputs!["ts/tsconfig.json"], tsconfig: inputs!["ts/tsconfig.json"],
inputs: inputs![ inputs: inputs![
"yarn",
":node_modules", ":node_modules",
":ts:generated", ":ts:generated",
glob!["ts/**/*", "ts/.svelte-kit/**"], glob!["ts/**/*", "ts/.svelte-kit/**"],

View file

@ -14,28 +14,8 @@ camino.workspace = true
dunce.workspace = true dunce.workspace = true
globset.workspace = true globset.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
maplit.workspace = true maplit.workspace = true
num_cpus.workspace = true num_cpus.workspace = true
regex.workspace = true
serde_json.workspace = true
sha2.workspace = true
walkdir.workspace = true walkdir.workspace = true
which.workspace = true which.workspace = true
[target.'cfg(windows)'.dependencies]
reqwest = { workspace = true, features = ["blocking", "json", "native-tls"] }
[target.'cfg(not(windows))'.dependencies]
reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
[[bin]]
name = "update_uv"
path = "src/bin/update_uv.rs"
[[bin]]
name = "update_protoc"
path = "src/bin/update_protoc.rs"
[[bin]]
name = "update_node"
path = "src/bin/update_node.rs"

View file

@ -44,51 +44,11 @@ pub trait BuildAction {
true true
} }
fn hide_progress(&self) -> bool { fn hide_last_line(&self) -> bool {
false false
} }
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
std::any::type_name::<Self>() std::any::type_name::<Self>().split("::").last().unwrap()
.split("::")
.last()
.unwrap()
.split('<')
.next()
.unwrap()
} }
} }
#[cfg(test)]
trait TestBuildAction {}
#[cfg(test)]
impl<T: TestBuildAction + ?Sized> BuildAction for T {
fn command(&self) -> &str {
"test"
}
fn files(&mut self, _build: &mut impl FilesHandle) {}
}
#[allow(dead_code, unused_variables)]
#[test]
fn should_strip_regions_in_type_name() {
struct Bare;
impl TestBuildAction for Bare {}
assert_eq!(Bare {}.name(), "Bare");
struct WithLifeTime<'a>(&'a str);
impl TestBuildAction for WithLifeTime<'_> {}
assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
assert_eq!(
WithMultiLifeTime("test", "test").name(),
"WithMultiLifeTime"
);
struct WithGeneric<T>(T);
impl<T> TestBuildAction for WithGeneric<T> {}
assert_eq!(WithGeneric(3).name(), "WithGeneric");
}

View file

@ -26,21 +26,22 @@ pub enum Platform {
MacX64, MacX64,
MacArm, MacArm,
WindowsX64, WindowsX64,
WindowsArm,
} }
impl Platform { impl Platform {
pub fn current() -> Self { pub fn current() -> Self {
let os = std::env::consts::OS; if cfg!(windows) {
let arch = std::env::consts::ARCH; Self::WindowsX64
match (os, arch) { } else {
("linux", "x86_64") => Self::LinuxX64, let os = std::env::consts::OS;
("linux", "aarch64") => Self::LinuxArm, let arch = std::env::consts::ARCH;
("macos", "x86_64") => Self::MacX64, match (os, arch) {
("macos", "aarch64") => Self::MacArm, ("linux", "x86_64") => Self::LinuxX64,
("windows", "x86_64") => Self::WindowsX64, ("linux", "aarch64") => Self::LinuxArm,
("windows", "aarch64") => Self::WindowsArm, ("macos", "x86_64") => Self::MacX64,
_ => panic!("unsupported os/arch {os} {arch} - PR welcome!"), ("macos", "aarch64") => Self::MacArm,
_ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
}
} }
} }
@ -61,13 +62,12 @@ impl Platform {
Platform::MacX64 => "x86_64-apple-darwin", Platform::MacX64 => "x86_64-apple-darwin",
Platform::MacArm => "aarch64-apple-darwin", Platform::MacArm => "aarch64-apple-darwin",
Platform::WindowsX64 => "x86_64-pc-windows-msvc", Platform::WindowsX64 => "x86_64-pc-windows-msvc",
Platform::WindowsArm => "aarch64-pc-windows-msvc",
} }
} }
} }
/// Append .exe to path if on Windows. /// Append .exe to path if on Windows.
pub fn with_exe(path: &str) -> Cow<'_, str> { pub fn with_exe(path: &str) -> Cow<str> {
if cfg!(windows) { if cfg!(windows) {
format!("{path}.exe").into() format!("{path}.exe").into()
} else { } else {

View file

@ -1,268 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::error::Error;
use std::fs;
use std::path::Path;
use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
#[derive(Debug)]
struct NodeRelease {
version: String,
files: Vec<NodeFile>,
}
#[derive(Debug)]
struct NodeFile {
filename: String,
url: String,
}
fn main() -> Result<(), Box<dyn Error>> {
let release_info = fetch_node_release_info()?;
let new_text = generate_node_archive_function(&release_info)?;
update_node_text(&new_text)?;
println!("Node.js archive function updated successfully!");
Ok(())
}
fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
let client = Client::new();
// Get the Node.js release info
let response = client
.get("https://nodejs.org/dist/index.json")
.header("User-Agent", "anki-build-updater")
.send()?;
let releases: Vec<Value> = response.json()?;
// Find the latest LTS release
let latest = releases
.iter()
.find(|release| {
// LTS releases have a non-false "lts" field
release["lts"].as_str().is_some() && release["lts"] != false
})
.ok_or("No LTS releases found")?;
let version = latest["version"]
.as_str()
.ok_or("Version not found")?
.to_string();
let files = latest["files"]
.as_array()
.ok_or("Files array not found")?
.iter()
.map(|f| f.as_str().unwrap_or(""))
.collect::<Vec<_>>();
let lts_name = latest["lts"].as_str().unwrap_or("unknown");
println!("Found Node.js LTS version: {version} ({lts_name})");
// Map platforms to their expected file keys and full filenames
let platform_mapping = vec![
(
"linux-x64",
"linux-x64",
format!("node-{version}-linux-x64.tar.xz"),
),
(
"linux-arm64",
"linux-arm64",
format!("node-{version}-linux-arm64.tar.xz"),
),
(
"darwin-x64",
"osx-x64-tar",
format!("node-{version}-darwin-x64.tar.xz"),
),
(
"darwin-arm64",
"osx-arm64-tar",
format!("node-{version}-darwin-arm64.tar.xz"),
),
(
"win-x64",
"win-x64-zip",
format!("node-{version}-win-x64.zip"),
),
(
"win-arm64",
"win-arm64-zip",
format!("node-{version}-win-arm64.zip"),
),
];
let mut node_files = Vec::new();
for (platform, file_key, filename) in platform_mapping {
// Check if this file exists in the release
if files.contains(&file_key) {
let url = format!("https://nodejs.org/dist/{version}/{filename}");
node_files.push(NodeFile {
filename: filename.clone(),
url,
});
println!("Found file for {platform}: {filename} (key: {file_key})");
} else {
return Err(
format!("File not found for {platform} (key: {file_key}): {filename}").into(),
);
}
}
Ok(NodeRelease {
version,
files: node_files,
})
}
fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
let client = Client::new();
// Fetch the SHASUMS256.txt file once
println!("Fetching SHA256 checksums...");
let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
let shasums_response = client
.get(&shasums_url)
.header("User-Agent", "anki-build-updater")
.send()?;
let shasums_text = shasums_response.text()?;
// Create a mapping from filename patterns to platform names - using the exact
// patterns we stored in files
let platform_mapping = vec![
("linux-x64.tar.xz", "LinuxX64"),
("linux-arm64.tar.xz", "LinuxArm"),
("darwin-x64.tar.xz", "MacX64"),
("darwin-arm64.tar.xz", "MacArm"),
("win-x64.zip", "WindowsX64"),
("win-arm64.zip", "WindowsArm"),
];
let mut platform_blocks = Vec::new();
for (file_pattern, platform_name) in platform_mapping {
// Find the file that ends with this pattern
if let Some(file) = release
.files
.iter()
.find(|f| f.filename.ends_with(file_pattern))
{
// Find the SHA256 for this file
let sha256 = shasums_text
.lines()
.find(|line| line.contains(&file.filename))
.and_then(|line| line.split_whitespace().next())
.ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;
println!(
"Found SHA256 for {}: {} => {}",
platform_name, file.filename, sha256
);
let block = format!(
" Platform::{} => OnlineArchive {{\n url: \"{}\",\n sha256: \"{}\",\n }},",
platform_name, file.url, sha256
);
platform_blocks.push(block);
} else {
return Err(format!(
"File not found for platform {platform_name}: no file ending with {file_pattern}"
)
.into());
}
}
let function = format!(
"pub fn node_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}",
platform_blocks.join("\n")
);
Ok(function)
}
fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
let node_rs_content = read_node_rs()?;
// Regex to match the entire node_archive function with proper multiline
// matching
let re = Regex::new(
r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
)?;
let updated_content = re.replace(&node_rs_content, new_function);
write_node_rs(&updated_content)?;
Ok(())
}
fn read_node_rs() -> Result<String, Box<dyn Error>> {
// Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
let manifest_dir =
std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
let path = Path::new(&manifest_dir).join("src").join("node.rs");
Ok(fs::read_to_string(path)?)
}
fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
// Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
let manifest_dir =
std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
let path = Path::new(&manifest_dir).join("src").join("node.rs");
fs::write(path, content)?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_regex_replacement() {
let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform {
Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
sha256: "old_hash",
},
Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
sha256: "old_hash",
},
}
}
More code here"#;
let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform {
Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
sha256: "new_hash",
},
Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
sha256: "new_hash",
},
}
}"#;
let re = Regex::new(
r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
).unwrap();
let result = re.replace(sample_content, new_function);
assert!(result.contains("v21.0.0"));
assert!(result.contains("new_hash"));
assert!(!result.contains("old_hash"));
assert!(result.contains("Some other code"));
assert!(result.contains("More code here"));
}
}

View file

@ -1,125 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::error::Error;
use std::fs;
use std::path::Path;
use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
use sha2::Digest;
use sha2::Sha256;
fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
let client = Client::new();
println!("Fetching latest protoc release info from GitHub...");
// Fetch latest release info
let response = client
.get("https://api.github.com/repos/protocolbuffers/protobuf/releases/latest")
.header("User-Agent", "Anki-Build-Script")
.send()?;
let release_info: Value = response.json()?;
let assets = release_info["assets"]
.as_array()
.expect("assets should be an array");
// Map platform names to their corresponding asset patterns
let platform_patterns = [
("LinuxX64", "linux-x86_64"),
("LinuxArm", "linux-aarch_64"),
("MacX64", "osx-universal_binary"), // Mac uses universal binary for both
("MacArm", "osx-universal_binary"),
("WindowsX64", "win64"), // Windows uses x86 binary for both archs
("WindowsArm", "win64"),
];
let mut match_blocks = Vec::new();
for (platform, pattern) in platform_patterns {
// Find the asset matching the platform pattern
let asset = assets.iter().find(|asset| {
let name = asset["name"].as_str().unwrap_or("");
name.starts_with("protoc-") && name.contains(pattern) && name.ends_with(".zip")
});
if asset.is_none() {
eprintln!("No asset found for platform {platform} pattern {pattern}");
continue;
}
let asset = asset.unwrap();
let download_url = asset["browser_download_url"].as_str().unwrap();
let asset_name = asset["name"].as_str().unwrap();
// Download the file and calculate SHA256 locally
println!("Downloading and checksumming {asset_name} for {platform}...");
let response = client
.get(download_url)
.header("User-Agent", "Anki-Build-Script")
.send()?;
let bytes = response.bytes()?;
let mut hasher = Sha256::new();
hasher.update(&bytes);
let sha256 = format!("{:x}", hasher.finalize());
// Handle platform-specific match patterns
let match_pattern = match platform {
"MacX64" => "Platform::MacX64 | Platform::MacArm",
"MacArm" => continue, // Skip MacArm since it's handled with MacX64
"WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
"WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
_ => &format!("Platform::{platform}"),
};
match_blocks.push(format!(
" {match_pattern} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}"
));
}
Ok(format!(
"pub fn protoc_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}\n}}",
match_blocks.join(",\n")
))
}
fn read_protobuf_rs() -> Result<String, Box<dyn Error>> {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let path = Path::new(&manifest_dir).join("src/protobuf.rs");
println!("Reading {}", path.display());
let content = fs::read_to_string(path)?;
Ok(content)
}
fn update_protoc_text(old_text: &str, new_protoc_text: &str) -> Result<String, Box<dyn Error>> {
let re =
Regex::new(r"(?ms)^pub fn protoc_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\}")
.unwrap();
if !re.is_match(old_text) {
return Err("Could not find protoc_archive function block to replace".into());
}
let new_content = re.replace(old_text, new_protoc_text).to_string();
println!("Original lines: {}", old_text.lines().count());
println!("Updated lines: {}", new_content.lines().count());
Ok(new_content)
}
fn write_protobuf_rs(content: &str) -> Result<(), Box<dyn Error>> {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let path = Path::new(&manifest_dir).join("src/protobuf.rs");
println!("Writing to {}", path.display());
fs::write(path, content)?;
Ok(())
}
fn main() -> Result<(), Box<dyn Error>> {
let new_protoc_archive = fetch_protoc_release_info()?;
let content = read_protobuf_rs()?;
let updated_content = update_protoc_text(&content, &new_protoc_archive)?;
write_protobuf_rs(&updated_content)?;
println!("Successfully updated protoc_archive function in protobuf.rs");
Ok(())
}
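The checksum step above, in isolation: a sketch hashing an in-memory buffer with the sha2 crate, producing the same lowercase hex format the OnlineArchive entries use.

use sha2::{Digest, Sha256};

fn sha256_hex(bytes: &[u8]) -> String {
    let mut hasher = Sha256::new();
    hasher.update(bytes);
    format!("{:x}", hasher.finalize())
}

fn main() {
    // Well-known digest of the ASCII string "hello".
    assert_eq!(
        sha256_hex(b"hello"),
        "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
    );
}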

View file

@ -1,140 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::error::Error;
use std::fs;
use std::path::Path;
use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
let client = Client::new();
println!("Fetching latest uv release info from GitHub...");
// Fetch latest release info
let response = client
.get("https://api.github.com/repos/astral-sh/uv/releases/latest")
.header("User-Agent", "Anki-Build-Script")
.send()?;
let release_info: Value = response.json()?;
let assets = release_info["assets"]
.as_array()
.expect("assets should be an array");
// Map platform names to their corresponding asset patterns
let platform_patterns = [
("LinuxX64", "x86_64-unknown-linux-gnu"),
("LinuxArm", "aarch64-unknown-linux-gnu"),
("MacX64", "x86_64-apple-darwin"),
("MacArm", "aarch64-apple-darwin"),
("WindowsX64", "x86_64-pc-windows-msvc"),
("WindowsArm", "aarch64-pc-windows-msvc"),
];
let mut match_blocks = Vec::new();
for (platform, pattern) in platform_patterns {
// Find the asset matching the platform pattern (the binary)
let asset = assets.iter().find(|asset| {
let name = asset["name"].as_str().unwrap_or("");
name.contains(pattern) && (name.ends_with(".tar.gz") || name.ends_with(".zip"))
});
if asset.is_none() {
eprintln!("No asset found for platform {platform} pattern {pattern}");
continue;
}
let asset = asset.unwrap();
let download_url = asset["browser_download_url"].as_str().unwrap();
let asset_name = asset["name"].as_str().unwrap();
// Find the corresponding .sha256 or .sha256sum asset
let sha_asset = assets.iter().find(|a| {
let name = a["name"].as_str().unwrap_or("");
name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
});
if sha_asset.is_none() {
eprintln!("No sha256 asset found for {asset_name}");
continue;
}
let sha_asset = sha_asset.unwrap();
let sha_url = sha_asset["browser_download_url"].as_str().unwrap();
println!("Fetching SHA256 for {platform}...");
let sha_text = client
.get(sha_url)
.header("User-Agent", "Anki-Build-Script")
.send()?
.text()?;
// The sha file is usually of the form: "<sha256> <filename>"
let sha256 = sha_text.split_whitespace().next().unwrap_or("");
match_blocks.push(format!(
" Platform::{platform} => {{\n OnlineArchive {{\n url: \"{download_url}\",\n sha256: \"{sha256}\",\n }}\n }}"
));
}
Ok(format!(
"pub fn uv_archive(platform: Platform) -> OnlineArchive {{\n match platform {{\n{}\n }}",
match_blocks.join(",\n")
))
}
fn read_python_rs() -> Result<String, Box<dyn Error>> {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let path = Path::new(&manifest_dir).join("src/python.rs");
println!("Reading {}", path.display());
let content = fs::read_to_string(path)?;
Ok(content)
}
fn update_uv_text(old_text: &str, new_uv_text: &str) -> Result<String, Box<dyn Error>> {
let re = Regex::new(r"(?ms)^pub fn uv_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}\s*\n\s*\}").unwrap();
if !re.is_match(old_text) {
return Err("Could not find uv_archive function block to replace".into());
}
let new_content = re.replace(old_text, new_uv_text).to_string();
println!("Original lines: {}", old_text.lines().count());
println!("Updated lines: {}", new_content.lines().count());
Ok(new_content)
}
fn write_python_rs(content: &str) -> Result<(), Box<dyn Error>> {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
let path = Path::new(&manifest_dir).join("src/python.rs");
println!("Writing to {}", path.display());
fs::write(path, content)?;
Ok(())
}
fn main() -> Result<(), Box<dyn Error>> {
let new_uv_archive = fetch_uv_release_info()?;
let content = read_python_rs()?;
let updated_content = update_uv_text(&content, &new_uv_archive)?;
write_python_rs(&updated_content)?;
println!("Successfully updated uv_archive function in python.rs");
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_update_uv_text_with_actual_file() {
let content = fs::read_to_string("src/python.rs").unwrap();
let original_lines = content.lines().count();
const EXPECTED_LINES_REMOVED: usize = 38;
let updated = update_uv_text(&content, "").unwrap();
let updated_lines = updated.lines().count();
assert_eq!(
updated_lines,
original_lines - EXPECTED_LINES_REMOVED,
"Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
);
}
}
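Unlike the protoc updater, this script reads the published .sha256 sidecar asset instead of hashing the download itself. A sketch of that parsing step; the sample line reuses the uv LinuxArm digest from python.rs for illustration.

// Pull the digest out of a "<sha256> <filename>" sidecar line.
fn parse_sha256(sha_text: &str) -> &str {
    sha_text.split_whitespace().next().unwrap_or("")
}

fn main() {
    let line = "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f  uv-aarch64-unknown-linux-gnu.tar.gz";
    assert_eq!(parse_sha256(line).len(), 64);
}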

View file

@ -271,14 +271,14 @@ impl BuildStatement<'_> {
stmt.rule_variables.push(("pool".into(), pool.into())); stmt.rule_variables.push(("pool".into(), pool.into()));
} }
if have_n2 { if have_n2 {
if action.hide_success() { stmt.rule_variables.push((
stmt.rule_variables "hide_success".into(),
.push(("hide_success".into(), "1".into())); (action.hide_success() as u8).to_string(),
} ));
if action.hide_progress() { stmt.rule_variables.push((
stmt.rule_variables "hide_last_line".into(),
.push(("hide_progress".into(), "1".into())); (action.hide_last_line() as u8).to_string(),
} ));
} }
stmt stmt
@ -300,7 +300,7 @@ impl BuildStatement<'_> {
writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap(); writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
for (key, value) in self.variables.iter().sorted() { for (key, value) in self.variables.iter().sorted() {
writeln!(buf, " {key} = {value}").unwrap(); writeln!(buf, " {key} = {}", value).unwrap();
} }
writeln!(buf).unwrap(); writeln!(buf).unwrap();
@ -368,8 +368,8 @@ pub trait FilesHandle {
/// different variables. This is a shortcut for calling .expand_inputs() /// different variables. This is a shortcut for calling .expand_inputs()
/// and then .add_inputs_vec() /// and then .add_inputs_vec()
/// - If the variable name is non-empty, a variable of the same name will be /// - If the variable name is non-empty, a variable of the same name will be
/// created so the file list can be accessed in the command. By /// created so the file list can be accessed in the command. By convention,
/// convention, this is often `in`. /// this is often `in`.
fn add_inputs(&mut self, variable: &'static str, inputs: impl AsRef<BuildInput>); fn add_inputs(&mut self, variable: &'static str, inputs: impl AsRef<BuildInput>);
fn add_inputs_vec(&mut self, variable: &'static str, inputs: Vec<String>); fn add_inputs_vec(&mut self, variable: &'static str, inputs: Vec<String>);
fn add_order_only_inputs(&mut self, variable: &'static str, inputs: impl AsRef<BuildInput>); fn add_order_only_inputs(&mut self, variable: &'static str, inputs: impl AsRef<BuildInput>);
@ -392,14 +392,14 @@ pub trait FilesHandle {
/// Add outputs to the build statement. Can be called multiple times with /// Add outputs to the build statement. Can be called multiple times with
/// different variables. /// different variables.
/// - Each output automatically has $builddir/ prefixed to it if it does not /// - Each output automatically has $builddir/ prefixed to it if it does not
/// already start with it. /// already start with it.
/// - If the variable name is non-empty, a variable of the same name will be /// - If the variable name is non-empty, a variable of the same name will be
/// created so the file list can be accessed in the command. By /// created so the file list can be accessed in the command. By convention,
/// convention, this is often `out`. /// this is often `out`.
/// - If subgroup is true, the files are also placed in a subgroup. Eg if a /// - If subgroup is true, the files are also placed in a subgroup. Eg
/// rule `foo` exists and subgroup `bar` is provided, the files are /// if a rule `foo` exists and subgroup `bar` is provided, the files are
/// accessible via `:foo:bar`. The variable name must not be empty, or /// accessible via `:foo:bar`. The variable name must not be empty, or
/// called `out`. /// called `out`.
fn add_outputs_ext( fn add_outputs_ext(
&mut self, &mut self,
variable: impl Into<String>, variable: impl Into<String>,
@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> {
let outputs = outputs.into_iter().map(|v| { let outputs = outputs.into_iter().map(|v| {
let v = v.as_ref(); let v = v.as_ref();
let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") { let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
format!("$builddir/{v}") format!("$builddir/{}", v)
} else { } else {
v.to_owned() v.to_owned()
}; };
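For illustration, a sketch of the text BuildStatement::write emits: the same `build <outputs>: <rule> <inputs>` line followed by indented variables. The rule name and values are examples, not the exact generated output.

use std::fmt::Write;

fn write_build_statement(
    buf: &mut String,
    outputs: &str,
    rule: &str,
    inputs: &str,
    vars: &[(&str, &str)],
) {
    writeln!(buf, "build {outputs}: {rule} {inputs}").unwrap();
    for (key, value) in vars {
        writeln!(buf, "  {key} = {value}").unwrap();
    }
    writeln!(buf).unwrap();
}

fn main() {
    let mut buf = String::new();
    write_build_statement(
        &mut buf,
        "$builddir/tests/vitest",
        "ViteTest",
        ":node_modules",
        &[("hide_success", "1")],
    );
    print!("{buf}");
}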

View file

@ -162,7 +162,7 @@ impl BuildAction for CargoTest {
"cargo-nextest", "cargo-nextest",
CargoInstall { CargoInstall {
binary_name: "cargo-nextest", binary_name: "cargo-nextest",
args: "cargo-nextest --version 0.9.99 --locked --no-default-features --features default-no-update", args: "cargo-nextest --version 0.9.57 --locked",
}, },
)?; )?;
setup_flags(build) setup_flags(build)

View file

@ -3,7 +3,6 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt::Display; use std::fmt::Display;
use std::sync::LazyLock;
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
@ -119,7 +118,9 @@ pub struct Glob {
pub exclude: Option<String>, pub exclude: Option<String>,
} }
static CACHED_FILES: LazyLock<Vec<Utf8PathBuf>> = LazyLock::new(cache_files); lazy_static::lazy_static! {
static ref CACHED_FILES: Vec<Utf8PathBuf> = cache_files();
}
/// Walking the source tree once instead of for each glob yields ~4x speed /// Walking the source tree once instead of for each glob yields ~4x speed
/// improvements. /// improvements.
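A self-contained sketch of the LazyLock pattern adopted above (stable since Rust 1.80): the closure runs once, on first access, and later globs reuse the cached list instead of re-walking the tree. The file names are placeholders.

use std::sync::LazyLock;

static CACHED_FILES: LazyLock<Vec<String>> = LazyLock::new(|| {
    // In the real build this walks the repo once; globs then filter the
    // cached list instead of re-walking per pattern.
    vec!["pylib/anki/__init__.py".into(), "qt/aqt/main.py".into()]
});

fn main() {
    // First deref runs the closure; later ones reuse the cached Vec.
    println!("{} files cached", CACHED_FILES.len());
}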

View file

@ -19,28 +19,24 @@ use crate::input::BuildInput;
pub fn node_archive(platform: Platform) -> OnlineArchive { pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform { match platform {
Platform::LinuxX64 => OnlineArchive { Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12", sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
}, },
Platform::LinuxArm => OnlineArchive { Platform::LinuxArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18", sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
}, },
Platform::MacX64 => OnlineArchive { Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a", sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
}, },
Platform::MacArm => OnlineArchive { Platform::MacArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914", sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
}, },
Platform::WindowsX64 => OnlineArchive { Platform::WindowsX64 => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip", url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85", sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
},
Platform::WindowsArm => OnlineArchive {
url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
}, },
} }
} }
@ -98,7 +94,7 @@ impl BuildAction for YarnInstall<'_> {
} }
} }
fn with_cmd_ext(bin: &str) -> Cow<'_, str> { fn with_cmd_ext(bin: &str) -> Cow<str> {
if cfg!(windows) { if cfg!(windows) {
format!("{bin}.cmd").into() format!("{bin}.cmd").into()
} else { } else {
@ -211,31 +207,6 @@ impl BuildAction for DPrint {
} }
} }
pub struct Prettier {
pub inputs: BuildInput,
pub check_only: bool,
}
impl BuildAction for Prettier {
fn command(&self) -> &str {
"$yarn prettier --cache $mode $pattern"
}
fn files(&mut self, build: &mut impl build::FilesHandle) {
build.add_inputs("yarn", inputs![":yarn:bin"]);
build.add_inputs("prettier", inputs![":node_modules:prettier"]);
build.add_inputs("", &self.inputs);
build.add_variable("pattern", r#""**/*.svelte""#);
let (file_ext, mode) = if self.check_only {
("fmt", "--check")
} else {
("check", "--write")
};
build.add_variable("mode", mode);
build.add_output_stamp(format!("tests/prettier.{file_ext}"));
}
}
pub struct SvelteCheck { pub struct SvelteCheck {
pub tsconfig: BuildInput, pub tsconfig: BuildInput,
pub inputs: BuildInput, pub inputs: BuildInput,
@ -243,20 +214,35 @@ pub struct SvelteCheck {
impl BuildAction for SvelteCheck { impl BuildAction for SvelteCheck {
fn command(&self) -> &str { fn command(&self) -> &str {
"$yarn svelte-check:once" if cfg!(windows) {
"cmd /c yarn svelte-check:once"
} else {
"./yarn svelte-check:once"
}
} }
fn files(&mut self, build: &mut impl build::FilesHandle) { fn files(&mut self, build: &mut impl build::FilesHandle) {
build.add_inputs("svelte-check", inputs![":node_modules:svelte-check"]); build.add_inputs("svelte-check", inputs![":node_modules:svelte-check"]);
build.add_inputs("tsconfig", &self.tsconfig); build.add_inputs("tsconfig", &self.tsconfig);
build.add_inputs("yarn", inputs![":yarn:bin"]);
build.add_inputs("", &self.inputs); build.add_inputs("", &self.inputs);
build.add_inputs("", inputs!["yarn.lock"]); build.add_inputs("", inputs!["yarn.lock"]);
build.add_variable(
"compiler_warnings",
[
"a11y-click-events-have-key-events",
"a11y-no-noninteractive-tabindex",
"a11y-no-static-element-interactions",
]
.iter()
.map(|warning| format!("{}$:ignore", warning))
.collect::<Vec<_>>()
.join(","),
);
let hash = simple_hash(&self.tsconfig); let hash = simple_hash(&self.tsconfig);
build.add_output_stamp(format!("tests/svelte-check.{hash}")); build.add_output_stamp(format!("tests/svelte-check.{hash}"));
} }
fn hide_progress(&self) -> bool { fn hide_last_line(&self) -> bool {
true true
} }
} }
@ -312,12 +298,15 @@ pub struct ViteTest {
impl BuildAction for ViteTest { impl BuildAction for ViteTest {
fn command(&self) -> &str { fn command(&self) -> &str {
"$yarn vitest:once" if cfg!(windows) {
"cmd /c yarn vitest:once"
} else {
"./yarn vitest:once"
}
} }
fn files(&mut self, build: &mut impl build::FilesHandle) { fn files(&mut self, build: &mut impl build::FilesHandle) {
build.add_inputs("vitest", inputs![":node_modules:vitest"]); build.add_inputs("vitest", inputs![":node_modules:vitest"]);
build.add_inputs("yarn", inputs![":yarn:bin"]);
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
build.add_output_stamp("tests/vitest"); build.add_output_stamp("tests/vitest");
} }
@ -468,7 +457,11 @@ pub struct SveltekitBuild {
impl BuildAction for SveltekitBuild { impl BuildAction for SveltekitBuild {
fn command(&self) -> &str { fn command(&self) -> &str {
if std::env::var("HMR").is_err() { if std::env::var("HMR").is_err() {
"$yarn build" if cfg!(windows) {
"cmd /c yarn build"
} else {
"./yarn build"
}
} else { } else {
"echo" "echo"
} }
@ -476,7 +469,6 @@ impl BuildAction for SveltekitBuild {
fn files(&mut self, build: &mut impl build::FilesHandle) { fn files(&mut self, build: &mut impl build::FilesHandle) {
build.add_inputs("node_modules", inputs![":node_modules"]); build.add_inputs("node_modules", inputs![":node_modules"]);
build.add_inputs("yarn", inputs![":yarn:bin"]);
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
build.add_inputs("", inputs!["yarn.lock"]); build.add_inputs("", inputs!["yarn.lock"]);
build.add_output_stamp("sveltekit.marker"); build.add_output_stamp("sveltekit.marker");
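A sketch of the compiler_warnings value assembled in SvelteCheck above. The `$:` in the real rule is ninja's escape for a literal colon, so after ninja unescapes the variable, svelte-check receives plain warning:ignore pairs.

fn main() {
    let warnings = [
        "a11y-click-events-have-key-events",
        "a11y-no-noninteractive-tabindex",
        "a11y-no-static-element-interactions",
    ]
    .iter()
    .map(|w| format!("{w}:ignore"))
    .collect::<Vec<_>>()
    .join(",");
    // -> "a11y-click-events-have-key-events:ignore,...":
    println!("{warnings}");
}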

View file

@ -21,26 +21,26 @@ pub fn protoc_archive(platform: Platform) -> OnlineArchive {
match platform { match platform {
Platform::LinuxX64 => { Platform::LinuxX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-x86_64.zip", url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip",
sha256: "96553041f1a91ea0efee963cb16f462f5985b4d65365f3907414c360044d8065", sha256: "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a",
} }
}, }
Platform::LinuxArm => { Platform::LinuxArm => {
OnlineArchive { OnlineArchive {
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-aarch_64.zip", url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip",
sha256: "6c554de11cea04c56ebf8e45b54434019b1cd85223d4bbd25c282425e306ecc2", sha256: "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad",
} }
}, }
Platform::MacX64 | Platform::MacArm => { Platform::MacX64 | Platform::MacArm => {
OnlineArchive { OnlineArchive {
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-osx-universal_binary.zip", url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip",
sha256: "99ea004549c139f46da5638187a85bbe422d78939be0fa01af1aa8ab672e395f", sha256: "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c",
} }
}, }
Platform::WindowsX64 | Platform::WindowsArm => { Platform::WindowsX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-win64.zip", url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip",
sha256: "70381b116ab0d71cb6a5177d9b17c7c13415866603a0fd40d513dafe32d56c35", sha256: "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec",
} }
} }
} }
@ -67,7 +67,7 @@ fn clang_format_archive(platform: Platform) -> OnlineArchive {
sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e", sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e",
} }
} }
Platform::WindowsX64 | Platform::WindowsArm=> { Platform::WindowsX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip", url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip",
sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc", sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc",

View file

@ -9,7 +9,6 @@ use maplit::hashmap;
use crate::action::BuildAction; use crate::action::BuildAction;
use crate::archives::download_and_extract; use crate::archives::download_and_extract;
use crate::archives::with_exe;
use crate::archives::OnlineArchive; use crate::archives::OnlineArchive;
use crate::archives::Platform; use crate::archives::Platform;
use crate::hash::simple_hash; use crate::hash::simple_hash;
@ -17,113 +16,82 @@ use crate::input::BuildInput;
use crate::inputs; use crate::inputs;
use crate::Build; use crate::Build;
// To update, run 'cargo run --bin update_uv'. /// When updating this, pyoxidizer.bzl needs updating too, but it uses different
// You'll need to do this when bumping Python versions, as uv bakes in /// files.
// the latest known version. pub fn python_archive(platform: Platform) -> OnlineArchive {
// When updating Python version, make sure to update version tag in BuildWheel
// too.
pub fn uv_archive(platform: Platform) -> OnlineArchive {
match platform { match platform {
Platform::LinuxX64 => { Platform::LinuxX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz", url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-install_only.tar.gz",
sha256: "909278eb197c5ed0e9b5f16317d1255270d1f9ea4196e7179ce934d48c4c2545", sha256: "9426bca501ae0a257392b10719e2e20ff5fa5e22a3ce4599d6ad0b3139f86417",
} }
}, }
Platform::LinuxArm => { Platform::LinuxArm => {
OnlineArchive { OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-unknown-linux-gnu.tar.gz", url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-unknown-linux-gnu-install_only.tar.gz",
sha256: "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f", sha256: "7d19e1ecd6e582423f7c74a0c67491eaa982ce9d5c5f35f0e4289f83127abcb8",
} }
}, }
Platform::MacX64 => { Platform::MacX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-apple-darwin.tar.gz", url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-install_only.tar.gz",
sha256: "d785753ac092e25316180626aa691c5dfe1fb075290457ba4fdb72c7c5661321", sha256: "5a0bf895a5cb08d6d008140abb41bb2c8cd638a665273f7d8eb258bc89de439b",
} }
}, }
Platform::MacArm => { Platform::MacArm => {
OnlineArchive { OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz", url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-install_only.tar.gz",
sha256: "721f532b73171586574298d4311a91d5ea2c802ef4db3ebafc434239330090c6", sha256: "bf0cd90204a2cc6da48cae1e4b32f48c9f7031fbe1238c5972104ccb0155d368",
} }
}, }
Platform::WindowsX64 => { Platform::WindowsX64 => {
OnlineArchive { OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-pc-windows-msvc.zip", url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
sha256: "e199b10bef1a7cc540014483e7f60f825a174988f41020e9d2a6b01bd60f0669", sha256: "8f0544cd593984f7ecb90c685931249c579302124b9821064873f3a14ed07005",
}
},
Platform::WindowsArm => {
OnlineArchive {
url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-pc-windows-msvc.zip",
sha256: "bb40708ad549ad6a12209cb139dd751bf0ede41deb679ce7513ce197bd9ef234",
} }
} }
} }
} }
pub fn setup_uv(build: &mut Build, platform: Platform) -> Result<()> { /// Returns the Python binary, which can be used to create venvs.
let uv_binary = match env::var("UV_BINARY") { /// Downloads if missing.
pub fn setup_python(build: &mut Build) -> Result<()> {
// if changing this, make sure you remove out/pyenv
let python_binary = match env::var("PYTHON_BINARY") {
Ok(path) => { Ok(path) => {
assert!( assert!(
Utf8Path::new(&path).is_absolute(), Utf8Path::new(&path).is_absolute(),
"UV_BINARY must be absolute" "PYTHON_BINARY must be absolute"
); );
path.into() path.into()
} }
Err(_) => { Err(_) => {
download_and_extract( download_and_extract(
build, build,
"uv", "python",
uv_archive(platform), python_archive(build.host_platform),
hashmap! { "bin" => [ hashmap! { "bin" => [
with_exe("uv") if cfg!(windows) { "python.exe" } else { "bin/python3"}
] }, ] },
)?; )?;
inputs![":extract:uv:bin"] inputs![":extract:python:bin"]
} }
}; };
build.add_dependency("uv_binary", uv_binary); build.add_dependency("python_binary", python_binary);
// Our macOS packaging needs access to the x86 binary on ARM.
if cfg!(target_arch = "aarch64") {
download_and_extract(
build,
"uv_mac_x86",
uv_archive(Platform::MacX64),
hashmap! { "bin" => [
with_exe("uv")
] },
)?;
}
// Our Linux packaging needs access to the ARM binary on x86
if cfg!(target_arch = "x86_64") {
download_and_extract(
build,
"uv_lin_arm",
uv_archive(Platform::LinuxArm),
hashmap! { "bin" => [
with_exe("uv")
] },
)?;
}
Ok(()) Ok(())
} }
pub struct PythonEnvironment { pub struct PythonEnvironment {
pub deps: BuildInput, pub folder: &'static str,
// todo: rename pub base_requirements_txt: BuildInput,
pub venv_folder: &'static str, pub requirements_txt: BuildInput,
pub extra_args: &'static str,
pub extra_binary_exports: &'static [&'static str], pub extra_binary_exports: &'static [&'static str],
} }
impl BuildAction for PythonEnvironment { impl BuildAction for PythonEnvironment {
fn command(&self) -> &str { fn command(&self) -> &str {
if env::var("OFFLINE_BUILD").is_err() { if env::var("OFFLINE_BUILD").is_err() {
"$runner pyenv $uv_binary $builddir/$pyenv_folder -- $extra_args" "$runner pyenv $python_binary $builddir/$pyenv_folder $system_pkgs $base_requirements $requirements"
} else { } else {
"echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'" "echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
} }
@ -131,7 +99,7 @@ impl BuildAction for PythonEnvironment {
fn files(&mut self, build: &mut impl crate::build::FilesHandle) { fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
let bin_path = |binary: &str| -> Vec<String> { let bin_path = |binary: &str| -> Vec<String> {
let folder = self.venv_folder; let folder = self.folder;
let path = if cfg!(windows) { let path = if cfg!(windows) {
format!("{folder}/scripts/{binary}.exe") format!("{folder}/scripts/{binary}.exe")
} else { } else {
@ -140,24 +108,17 @@ impl BuildAction for PythonEnvironment {
vec![path] vec![path]
}; };
build.add_inputs("", &self.deps);
build.add_variable("pyenv_folder", self.venv_folder);
if env::var("OFFLINE_BUILD").is_err() { if env::var("OFFLINE_BUILD").is_err() {
build.add_inputs("uv_binary", inputs![":uv_binary"]); build.add_inputs("python_binary", inputs![":python_binary"]);
build.add_variable("pyenv_folder", self.folder);
// Add --python flag to extra_args if PYTHON_BINARY is set build.add_inputs("base_requirements", &self.base_requirements_txt);
let mut args = self.extra_args.to_string(); build.add_inputs("requirements", &self.requirements_txt);
if let Ok(python_binary) = env::var("PYTHON_BINARY") { build.add_outputs_ext("pip", bin_path("pip"), true);
args = format!("--python {python_binary} {args}");
}
build.add_variable("extra_args", args);
} }
build.add_outputs_ext("bin", bin_path("python"), true); build.add_outputs_ext("bin", bin_path("python"), true);
for binary in self.extra_binary_exports { for binary in self.extra_binary_exports {
build.add_outputs_ext(*binary, bin_path(binary), true); build.add_outputs_ext(*binary, bin_path(binary), true);
} }
build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
} }
fn check_output_timestamps(&self) -> bool { fn check_output_timestamps(&self) -> bool {
@ -185,7 +146,7 @@ impl BuildAction for PythonTypecheck {
build.add_output_stamp(format!("tests/python_typecheck.{hash}")); build.add_output_stamp(format!("tests/python_typecheck.{hash}"));
} }
fn hide_progress(&self) -> bool { fn hide_last_line(&self) -> bool {
true true
} }
} }
@ -193,19 +154,31 @@ impl BuildAction for PythonTypecheck {
struct PythonFormat<'a> { struct PythonFormat<'a> {
pub inputs: &'a BuildInput, pub inputs: &'a BuildInput,
pub check_only: bool, pub check_only: bool,
pub isort_ini: &'a BuildInput,
} }
impl BuildAction for PythonFormat<'_> { impl BuildAction for PythonFormat<'_> {
fn command(&self) -> &str { fn command(&self) -> &str {
"$ruff format $mode $in && $ruff check --select I --fix $in" "$black -t py39 -q $check --color $in && $
$isort --color --settings-path $isort_ini $check $in"
} }
fn files(&mut self, build: &mut impl crate::build::FilesHandle) { fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
build.add_inputs("in", self.inputs); build.add_inputs("in", self.inputs);
build.add_inputs("ruff", inputs![":pyenv:ruff"]); build.add_inputs("black", inputs![":pyenv:black"]);
build.add_inputs("isort", inputs![":pyenv:isort"]);
let hash = simple_hash(self.inputs); let hash = simple_hash(self.inputs);
build.add_variable("mode", if self.check_only { "--check" } else { "" }); build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}");
build.add_inputs("isort_ini", self.isort_ini);
build.add_variable(
"check",
if self.check_only {
"--diff --check"
} else {
""
},
);
build.add_output_stamp(format!( build.add_output_stamp(format!(
"tests/python_format.{}.{hash}", "tests/python_format.{}.{hash}",
@ -215,52 +188,49 @@ impl BuildAction for PythonFormat<'_> {
} }
pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> { pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> {
let isort_ini = &inputs![".isort.cfg"];
build.add_action( build.add_action(
format!("check:format:python:{group}"), &format!("check:format:python:{group}"),
PythonFormat { PythonFormat {
inputs: &inputs, inputs: &inputs,
check_only: true, check_only: true,
isort_ini,
}, },
)?; )?;
build.add_action( build.add_action(
format!("format:python:{group}"), &format!("format:python:{group}"),
PythonFormat { PythonFormat {
inputs: &inputs, inputs: &inputs,
check_only: false, check_only: false,
isort_ini,
}, },
)?; )?;
Ok(()) Ok(())
} }
pub struct RuffCheck { pub struct PythonLint {
pub folders: &'static [&'static str], pub folders: &'static [&'static str],
pub pylint_ini: BuildInput,
pub deps: BuildInput, pub deps: BuildInput,
pub check_only: bool,
} }
impl BuildAction for RuffCheck { impl BuildAction for PythonLint {
fn command(&self) -> &str { fn command(&self) -> &str {
"$ruff check $folders $mode" "$pylint --rcfile $pylint_ini -sn -j $cpus $folders"
} }
fn files(&mut self, build: &mut impl crate::build::FilesHandle) { fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
build.add_inputs("", &self.deps); build.add_inputs("", &self.deps);
build.add_inputs("", inputs![".ruff.toml"]); build.add_inputs("pylint", inputs![":pyenv:pylint"]);
build.add_inputs("ruff", inputs![":pyenv:ruff"]); build.add_inputs("pylint_ini", &self.pylint_ini);
build.add_variable("folders", self.folders.join(" ")); build.add_variable("folders", self.folders.join(" "));
build.add_variable( // On a 16 core system, values above 10 do not improve wall clock time,
"mode", // but waste extra cores that could be working on other tests.
if self.check_only { build.add_variable("cpus", num_cpus::get().min(10).to_string());
""
} else {
"--fix --unsafe-fixes"
},
);
let hash = simple_hash(&self.deps); let hash = simple_hash(&self.deps);
let kind = if self.check_only { "check" } else { "fix" }; build.add_output_stamp(format!("tests/python_lint.{hash}"));
build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}"));
} }
} }
@ -281,7 +251,7 @@ impl BuildAction for PythonTest {
build.add_variable("folder", self.folder); build.add_variable("folder", self.folder);
build.add_variable( build.add_variable(
"pythonpath", "pythonpath",
self.python_path.join(if cfg!(windows) { ";" } else { ":" }), &self.python_path.join(if cfg!(windows) { ";" } else { ":" }),
); );
build.add_env_var("PYTHONPATH", "$pythonpath"); build.add_env_var("PYTHONPATH", "$pythonpath");
build.add_env_var("ANKI_TEST_MODE", "1"); build.add_env_var("ANKI_TEST_MODE", "1");
@ -289,7 +259,7 @@ impl BuildAction for PythonTest {
build.add_output_stamp(format!("tests/python_pytest.{hash}")); build.add_output_stamp(format!("tests/python_pytest.{hash}"));
} }
fn hide_progress(&self) -> bool { fn hide_last_line(&self) -> bool {
true true
} }
} }


@ -30,12 +30,12 @@ impl Build {
) )
.unwrap(); .unwrap();
for (key, value) in &self.variables { for (key, value) in &self.variables {
writeln!(&mut buf, "{key} = {value}").unwrap(); writeln!(&mut buf, "{} = {}", key, value).unwrap();
} }
buf.push('\n'); buf.push('\n');
for (key, value) in &self.pools { for (key, value) in &self.pools {
writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap(); writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap();
} }
buf.push('\n'); buf.push('\n');


@ -15,6 +15,7 @@ camino.workspace = true
clap.workspace = true clap.workspace = true
flate2.workspace = true flate2.workspace = true
junction.workspace = true junction.workspace = true
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
sha2.workspace = true sha2.workspace = true
tar.workspace = true tar.workspace = true
termcolor.workspace = true termcolor.workspace = true
@ -23,9 +24,3 @@ which.workspace = true
xz2.workspace = true xz2.workspace = true
zip.workspace = true zip.workspace = true
zstd.workspace = true zstd.workspace = true
[target.'cfg(windows)'.dependencies]
reqwest = { workspace = true, features = ["native-tls"] }
[target.'cfg(not(windows))'.dependencies]
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }


@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String {
let mut digest = sha2::Sha256::new(); let mut digest = sha2::Sha256::new();
digest.update(data); digest.update(data);
let result = digest.finalize(); let result = digest.finalize();
format!("{result:x}") format!("{:x}", result)
} }
enum CompressionKind { enum CompressionKind {


@ -7,8 +7,6 @@ use std::io::Write;
use std::process::Command; use std::process::Command;
use std::time::Instant; use std::time::Instant;
use anki_process::CommandExt;
use anyhow::Context;
use camino::Utf8Path; use camino::Utf8Path;
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
use clap::Args; use clap::Args;
@ -67,10 +65,7 @@ pub fn run_build(args: BuildArgs) {
"MYPY_CACHE_DIR", "MYPY_CACHE_DIR",
build_root.join("tests").join("mypy").into_string(), build_root.join("tests").join("mypy").into_string(),
) )
.env( .env("PYTHONPYCACHEPREFIX", build_root.join("pycache"))
"PYTHONPYCACHEPREFIX",
std::path::absolute(build_root.join("pycache")).unwrap(),
)
// commands will not show colors by default, as we do not provide a tty // commands will not show colors by default, as we do not provide a tty
.env("FORCE_COLOR", "1") .env("FORCE_COLOR", "1")
.env("MYPY_FORCE_COLOR", "1") .env("MYPY_FORCE_COLOR", "1")
@ -80,9 +75,7 @@ pub fn run_build(args: BuildArgs) {
} }
// run build // run build
let Ok(mut status) = command.status() else { let mut status = command.status().expect("ninja not installed");
panic!("\nn2 and ninja missing/failed. did you forget 'bash tools/install-n2'?");
};
if !status.success() && Instant::now().duration_since(start_time).as_secs() < 3 { if !status.success() && Instant::now().duration_since(start_time).as_secs() < 3 {
// if the build fails quickly, there's a reasonable chance that build.ninja // if the build fails quickly, there's a reasonable chance that build.ninja
// references a file that has been renamed/deleted. We currently don't // references a file that has been renamed/deleted. We currently don't
@ -138,7 +131,7 @@ fn setup_build_root() -> Utf8PathBuf {
true true
}; };
if create { if create {
println!("Switching build root to {new_target}"); println!("Switching build root to {}", new_target);
std::os::unix::fs::symlink(new_target, build_root).unwrap(); std::os::unix::fs::symlink(new_target, build_root).unwrap();
} }
} }
@ -169,12 +162,11 @@ fn maybe_update_buildhash(build_root: &Utf8Path) {
fn get_buildhash() -> String { fn get_buildhash() -> String {
let output = Command::new("git") let output = Command::new("git")
.args(["rev-parse", "--short=8", "HEAD"]) .args(["rev-parse", "--short=8", "HEAD"])
.utf8_output() .output()
.context( .expect("git");
"Make sure you're building from a clone of the git repo, and that 'git' is installed.", assert!(output.status.success(),
) "Invoking 'git' failed. Make sure you're building from a clone of the git repo, and that 'git' is installed.");
.unwrap(); String::from_utf8(output.stdout).unwrap().trim().into()
output.stdout.trim().into()
} }
fn write_if_changed(path: &Utf8Path, contents: &str) { fn write_if_changed(path: &Utf8Path, contents: &str) {


@ -0,0 +1,62 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use std::fs;
use std::process::Command;
use camino::Utf8PathBuf;
use clap::Args;
use crate::run::run_command;
#[derive(Args, Debug)]
pub struct BuildArtifactsArgs {
bundle_root: Utf8PathBuf,
pyoxidizer_bin: String,
}
pub fn build_artifacts(args: BuildArtifactsArgs) {
// build.rs doesn't declare inputs from venv, so we need to force a rebuild to
// ensure changes to our libs/the venv get included
let artifacts = args.bundle_root.join("artifacts");
if artifacts.exists() {
fs::remove_dir_all(&artifacts).unwrap();
}
let bundle_root = args.bundle_root.canonicalize_utf8().unwrap();
let build_folder = bundle_root.join("build");
if build_folder.exists() {
fs::remove_dir_all(&build_folder).unwrap();
}
run_command(
Command::new(&args.pyoxidizer_bin)
.args([
"--system-rust",
"run-build-script",
"qt/bundle/build.rs",
"--var",
"venv",
"out/bundle/pyenv",
"--var",
"build",
build_folder.as_str(),
])
.env("CARGO_MANIFEST_DIR", "qt/bundle")
.env("CARGO_TARGET_DIR", "out/bundle/rust")
.env("PROFILE", "release")
.env("OUT_DIR", &artifacts)
.env("TARGET", env!("TARGET"))
.env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
.env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
.env("CARGO_BUILD_TARGET", env!("TARGET")),
);
}
pub fn macos_deployment_target() -> &'static str {
if env!("TARGET") == "x86_64-apple-darwin" {
"10.13.4"
} else {
"11"
}
}


@ -0,0 +1,53 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::process::Command;
use anki_process::CommandExt;
use camino::Utf8Path;
use camino::Utf8PathBuf;
use super::artifacts::macos_deployment_target;
use crate::run::run_command;
pub fn build_bundle_binary() {
let mut features = String::from("build-mode-prebuilt-artifacts");
if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
features.push_str(",global-allocator-jemalloc,allocator-jemalloc");
}
let mut command = Command::new("cargo");
command
.args([
"build",
"--manifest-path=qt/bundle/Cargo.toml",
"--target-dir=out/bundle/rust",
"--release",
"--no-default-features",
])
.arg(format!("--features={features}"))
.env(
"DEFAULT_PYTHON_CONFIG_RS",
// included in main.rs, so relative to qt/bundle/src
"../../../out/bundle/artifacts/",
)
.env(
"PYO3_CONFIG_FILE",
Utf8Path::new("out/bundle/artifacts/pyo3-build-config-file.txt")
.canonicalize_utf8()
.unwrap(),
)
.env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
.env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
.env("CARGO_BUILD_TARGET", env!("TARGET"));
if env!("TARGET") == "x86_64-apple-darwin" {
let xcode_path = Command::run_with_output(["xcode-select", "-p"]).unwrap();
let ld_classic = Utf8PathBuf::from(xcode_path.stdout.trim())
.join("Toolchains/XcodeDefault.xctoolchain/usr/bin/ld-classic");
if ld_classic.exists() {
// work around XCode 15's default linker not supporting macOS 10.15-12.
command.env("RUSTFLAGS", &format!("-Clink-arg=-fuse-ld={ld_classic}"));
}
}
run_command(&mut command);
}


@ -0,0 +1,156 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::env;
use std::fs;
use std::process::Command;
use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Args;
use clap::ValueEnum;
use crate::paths::absolute_msys_path;
use crate::paths::unix_path;
use crate::run::run_command;
#[derive(Clone, Copy, ValueEnum, Debug)]
enum DistKind {
Standard,
Alternate,
}
#[derive(Args, Debug)]
pub struct BuildDistFolderArgs {
kind: DistKind,
folder_root: Utf8PathBuf,
}
pub fn build_dist_folder(args: BuildDistFolderArgs) {
let BuildDistFolderArgs { kind, folder_root } = args;
fs::create_dir_all(&folder_root).unwrap();
// Start with Qt, as it's the largest, and we use --delete to ensure there are
// no stale files in lib/. Skipped on macOS as Qt is handled later.
if !cfg!(target_os = "macos") {
copy_qt_from_venv(kind, &folder_root);
}
clean_top_level_files(&folder_root);
copy_binary_and_pylibs(&folder_root);
if cfg!(target_os = "linux") {
copy_linux_extras(kind, &folder_root);
} else if cfg!(windows) {
copy_windows_extras(&folder_root);
}
fs::write(folder_root.with_extension("stamp"), b"").unwrap();
}
fn copy_qt_from_venv(kind: DistKind, folder_root: &Utf8Path) {
let python39 = if cfg!(windows) { "" } else { "python3.9/" };
let qt_root = match kind {
DistKind::Standard => {
folder_root.join(format!("../pyenv/lib/{python39}site-packages/PyQt6"))
}
DistKind::Alternate => {
folder_root.join(format!("../pyenv-qt5/lib/{python39}site-packages/PyQt5"))
}
};
let src_path = absolute_msys_path(&qt_root);
let lib_path = folder_root.join("lib");
fs::create_dir_all(&lib_path).unwrap();
let dst_path = with_slash(absolute_msys_path(&lib_path));
run_command(Command::new("rsync").args([
"-a",
"--delete",
"--exclude-from",
"qt/bundle/qt.exclude",
&src_path,
&dst_path,
]));
}
fn copy_linux_extras(kind: DistKind, folder_root: &Utf8Path) {
// add README, installer, etc
run_command(Command::new("rsync").args(["-a", "qt/bundle/lin/", &with_slash(folder_root)]));
// add extra IME plugins from download
let lib_path = folder_root.join("lib");
let src_path = folder_root
.join("../../extracted/linux_qt_plugins")
.join(match kind {
DistKind::Standard => "qt6",
DistKind::Alternate => "qt5",
});
let dst_path = lib_path.join(match kind {
DistKind::Standard => "PyQt6/Qt6/plugins",
DistKind::Alternate => "PyQt5/Qt5/plugins",
});
run_command(Command::new("rsync").args(["-a", &with_slash(src_path), &with_slash(dst_path)]));
}
fn copy_windows_extras(folder_root: &Utf8Path) {
run_command(Command::new("rsync").args([
"-a",
"out/extracted/win_amd64_audio/",
&with_slash(folder_root),
]));
}
fn clean_top_level_files(folder_root: &Utf8Path) {
let mut to_remove = vec![];
for entry in fs::read_dir(folder_root).unwrap() {
let entry = entry.unwrap();
if entry.file_name() == "lib" {
continue;
} else {
to_remove.push(entry.path());
}
}
for path in to_remove {
if path.is_dir() {
fs::remove_dir_all(path).unwrap()
} else {
fs::remove_file(path).unwrap()
}
}
}
fn with_slash<P>(path: P) -> String
where
P: AsRef<str>,
{
format!("{}/", path.as_ref())
}
fn copy_binary_and_pylibs(folder_root: &Utf8Path) {
let binary = folder_root
.join("../rust")
.join(env!("TARGET"))
.join("release")
.join(if cfg!(windows) { "anki.exe" } else { "anki" });
let extra_files = folder_root
.join("../build")
.join(env!("TARGET"))
.join("release/resources/extra_files");
run_command(Command::new("rsync").args([
"-a",
"--exclude",
"PyQt6",
// misleading, as it misses the GPL PyQt, and our Rust/JS
// dependencies
"--exclude",
"COPYING.txt",
&unix_path(&binary),
&with_slash(unix_path(&extra_files)),
&with_slash(unix_path(folder_root)),
]));
let google_py = if cfg!(windows) {
folder_root.join("../pyenv/lib/site-packages/google")
} else {
folder_root.join("../pyenv/lib/python3.9/site-packages/google")
};
run_command(Command::new("rsync").args([
"-a",
&unix_path(&google_py),
&with_slash(unix_path(&folder_root.join("lib"))),
]));
}


@ -1,5 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub mod http_client; pub mod artifacts;
pub mod login; pub mod binary;
pub mod folder;


@ -7,6 +7,7 @@
mod archive; mod archive;
mod build; mod build;
mod bundle;
mod paths; mod paths;
mod pyenv; mod pyenv;
mod rsync; mod rsync;
@ -18,6 +19,11 @@ use archive::archive_command;
use archive::ArchiveArgs; use archive::ArchiveArgs;
use build::run_build; use build::run_build;
use build::BuildArgs; use build::BuildArgs;
use bundle::artifacts::build_artifacts;
use bundle::artifacts::BuildArtifactsArgs;
use bundle::binary::build_bundle_binary;
use bundle::folder::build_dist_folder;
use bundle::folder::BuildDistFolderArgs;
use clap::Parser; use clap::Parser;
use clap::Subcommand; use clap::Subcommand;
use pyenv::setup_pyenv; use pyenv::setup_pyenv;
@ -42,6 +48,9 @@ enum Command {
Rsync(RsyncArgs), Rsync(RsyncArgs),
Run(RunArgs), Run(RunArgs),
Build(BuildArgs), Build(BuildArgs),
BuildArtifacts(BuildArtifactsArgs),
BuildBundleBinary,
BuildDistFolder(BuildDistFolderArgs),
#[clap(subcommand)] #[clap(subcommand)]
Archive(ArchiveArgs), Archive(ArchiveArgs),
} }
@ -53,6 +62,9 @@ fn main() -> Result<()> {
Command::Rsync(args) => rsync_files(args), Command::Rsync(args) => rsync_files(args),
Command::Yarn(args) => setup_yarn(args), Command::Yarn(args) => setup_yarn(args),
Command::Build(args) => run_build(args), Command::Build(args) => run_build(args),
Command::BuildArtifacts(args) => build_artifacts(args),
Command::BuildBundleBinary => build_bundle_binary(),
Command::BuildDistFolder(args) => build_dist_folder(args),
Command::Archive(args) => archive_command(args)?, Command::Archive(args) => archive_command(args)?,
}; };
Ok(()) Ok(())


@ -16,3 +16,8 @@ pub fn absolute_msys_path(path: &Utf8Path) -> String {
// and \ -> / // and \ -> /
format!("/{drive}/{}", path[7..].replace('\\', "/")) format!("/{drive}/{}", path[7..].replace('\\', "/"))
} }
/// Converts backslashes to forward slashes
pub fn unix_path(path: &Utf8Path) -> String {
path.as_str().replace('\\', "/")
}


@ -1,7 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::fs;
use std::process::Command; use std::process::Command;
use camino::Utf8Path; use camino::Utf8Path;
@ -11,10 +10,12 @@ use crate::run::run_command;
#[derive(Args)] #[derive(Args)]
pub struct PyenvArgs { pub struct PyenvArgs {
uv_bin: String, python_bin: String,
pyenv_folder: String, pyenv_folder: String,
#[arg(trailing_var_arg = true)] initial_reqs: String,
extra_args: Vec<String>, reqs: Vec<String>,
#[arg(long, allow_hyphen_values(true))]
venv_args: Vec<String>,
} }
/// Set up a venv if one doesn't already exist, and then sync packages with /// Set up a venv if one doesn't already exist, and then sync packages with
@ -22,32 +23,35 @@ pub struct PyenvArgs {
pub fn setup_pyenv(args: PyenvArgs) { pub fn setup_pyenv(args: PyenvArgs) {
let pyenv_folder = Utf8Path::new(&args.pyenv_folder); let pyenv_folder = Utf8Path::new(&args.pyenv_folder);
// On first run, ninja creates an empty bin/ folder which breaks the initial let pyenv_bin_folder = pyenv_folder.join(if cfg!(windows) { "scripts" } else { "bin" });
// install. But we don't want to indiscriminately remove the folder, or let pyenv_python = pyenv_bin_folder.join("python");
// macOS Gatekeeper needs to rescan the files each time. let pip_sync = pyenv_bin_folder.join("pip-sync");
if pyenv_folder.exists() {
let cache_tag = pyenv_folder.join("CACHEDIR.TAG"); if !pyenv_python.exists() {
if !cache_tag.exists() { run_command(
fs::remove_dir_all(pyenv_folder).expect("Failed to remove existing pyenv folder"); Command::new(&args.python_bin)
.args(["-m", "venv"])
.args(args.venv_args)
.arg(pyenv_folder),
);
if cfg!(windows) {
// the first install on Windows throws an error the first time pip is upgraded,
// so we install it twice and swallow the first error
let _output = Command::new(&pyenv_python)
.args(["-m", "pip", "install", "-r", &args.initial_reqs])
.output()
.unwrap();
} }
run_command(Command::new(pyenv_python).args([
"-m",
"pip",
"install",
"-r",
&args.initial_reqs,
]));
} }
let mut command = Command::new(args.uv_bin); run_command(Command::new(pip_sync).args(&args.reqs));
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
command.env_remove(key);
}
}
run_command(
command
.env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
.args(["sync", "--locked", "--no-config"])
.args(args.extra_args),
);
// Write empty stamp file
fs::write(pyenv_folder.join(".stamp"), "").expect("Failed to write stamp file");
} }


@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::io::ErrorKind;
use std::process::Command; use std::process::Command;
use anki_io::create_dir_all; use anki_io::create_dir_all;
@ -43,7 +44,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
if let Some((k, v)) = s.split_once('=') { if let Some((k, v)) = s.split_once('=') {
Ok((k.into(), v.into())) Ok((k.into(), v.into()))
} else { } else {
Err(std::io::Error::other("invalid env var")) Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
} }
} }
@ -83,7 +84,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {
pub fn run_command(command: &mut Command) { pub fn run_command(command: &mut Command) {
if let Err(err) = command.ensure_success() { if let Err(err) = command.ensure_success() {
println!("{err}"); println!("{}", err);
std::process::exit(1); std::process::exit(1);
} }
} }


@ -28,11 +28,7 @@ pub fn setup_yarn(args: YarnArgs) {
.arg("--ignore-scripts"), .arg("--ignore-scripts"),
); );
} else { } else {
run_command( run_command(Command::new(&args.yarn_bin).arg("install"));
Command::new(&args.yarn_bin)
.arg("install")
.arg("--immutable"),
);
} }
std::fs::write(args.stamp, b"").unwrap(); std::fs::write(args.stamp, b"").unwrap();


@ -1,4 +1,4 @@
[toolchain] [toolchain]
channel = "nightly-2025-03-20" channel = "nightly-2023-09-02"
profile = "minimal" profile = "minimal"
components = ["rustfmt"] components = ["rustfmt"]

File diff suppressed because it is too large.


@ -3,29 +3,32 @@
For info on contributing things other than code, such as translations, decks For info on contributing things other than code, such as translations, decks
and add-ons, please see https://docs.ankiweb.net/contrib and add-ons, please see https://docs.ankiweb.net/contrib
With most users now on 2.1, the past 2 years have been focused on paying down some
of the technical debt that Anki's codebase has built up over the years, and making
changes that will make future maintenance and refactoring easier. A lot of Anki's
"business logic" has been migrated to Rust, which AnkiMobile and AnkiDroid
can also take advantage of - previously a lot of effort was wasted writing the same
code for each platform, and then debugging differences in the implementations.
Considerable effort has also been put into improving the Python side of things,
with type hints added to the majority of the codebase, automatic linting/formatting,
and refactoring of parts of the code.
The import/export code remains to be done, and this will likely
take a number of months to work through. Until that is complete, new features
will not be the top priority, unless they are easy wins as part of the refactoring
process.
If you are planning to contribute any non-trivial changes, please reach out
on the support site before you begin work. Some areas (primarily pylib/) are
likely to change/conflict with other work, and larger changes will likely need
to wait until the refactoring process is done.
## Help wanted ## Help wanted
If you'd like to contribute but don't know what to work on, please take a look If you'd like to contribute but don't know what to work on, please take a look
at the [issues tab](https://github.com/ankitects/anki/issues) of the Anki repo at the [issues tab](https://github.com/ankitects/anki/issues) of the Anki repo
on GitHub. on GitHub.
## Larger changes
Before starting work on larger changes, especially ones that aren't listed on the
issue tracker, please reach out on the forums first, so we can let you know whether
they're likely to be accepted or not. When you've spent a bunch of time on a PR that
ends up getting rejected, it's no fun for either you or us.
## Refactoring
Please avoid PRs that focus on refactoring. Every PR has a review cost and a chance
of introducing accidental regressions, and often those costs are not worth it for
slightly more elegant code.
That's not to say there's no value in refactoring. But such changes are usually better done
in a PR that happens to be working in the same area - for example, making small changes
to the code as part of fixing a bug, or a larger refactor when introducing a new feature.
## Type hints ## Type hints
Most of Anki's Python code now has type hints, which improve code completion, Most of Anki's Python code now has type hints, which improve code completion,


@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with
./ninja format ./ninja format
``` ```
## Fixing ruff/eslint/copyright header issues ## Fixing eslint/copyright header issues
``` ```
./ninja fix ./ninja fix
@ -133,7 +133,7 @@ To build wheels on Mac/Linux:
The generated wheels are in out/wheels. You can then install them by copying the paths into a pip install command. The generated wheels are in out/wheels. You can then install them by copying the paths into a pip install command.
Follow the steps [on the beta site](https://betas.ankiweb.net/#via-pypipip), but replace the Follow the steps [on the beta site](https://betas.ankiweb.net/#via-pypipip), but replace the
`pip install --upgrade --pre aqt` line with something like: `pip install --upgrade --pre aqt[qt6]` line with something like:
``` ```
/my/pyenv/bin/pip install --upgrade out/wheels/*.whl /my/pyenv/bin/pip install --upgrade out/wheels/*.whl
@ -141,6 +141,18 @@ Follow the steps [on the beta site](https://betas.ankiweb.net/#via-pypipip), but
(On Windows you'll need to list out the filenames manually instead of using a wildcard). (On Windows you'll need to list out the filenames manually instead of using a wildcard).
You'll also need to install PyQt:
```
$ /my/pyenv/bin/pip install pyqt6 pyqt6-webengine
```
or
```
$ my/pyenv/bin/pip install pyqt5 pyqtwebengine
```
## Cleaning up build files ## Cleaning up build files
Apart from submodule checkouts, most build files go into the `out/` folder (and Apart from submodule checkouts, most build files go into the `out/` folder (and
@ -190,10 +202,13 @@ in the collection2.log file will also be printed on stdout.
If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit. If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit.
# Installer/launcher # Binary Bundles
- The anki-release package is created/published with the scripts in qt/release. Anki's official binary packages are created with `./ninja bundle`. The bundling
- The installer/launcher is created with the build scripts in qt/launcher/{platform}. process was created specifically for the official builds, and is provided as-is;
we are unfortunately not able to provide assistance with any issues you may run
into when using it. You'll need to run
`git submodule update --checkout qt/bundle/PyOxidizer` first.
## Mixing development and study ## Mixing development and study


@ -1,78 +1,35 @@
# This is a user-contributed Dockerfile. No official support is available. # This Dockerfile uses three stages.
# 1. Compile anki (and dependencies) and build python wheels.
# 2. Create a virtual environment containing anki and its dependencies.
# 3. Create a final image that only includes anki's virtual environment and required
# system packages.
ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive" ARG DEBIAN_FRONTEND="noninteractive"
FROM ubuntu:24.04 AS build # Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
> /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel \
# Bazel expects /usr/bin/python
&& ln -s /usr/local/bin/python /usr/bin/python
WORKDIR /opt/anki WORKDIR /opt/anki
ENV PYTHON_VERSION="3.13" COPY . .
# Build python wheels.
# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
build-essential \
pkg-config \
libssl-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
libffi-dev \
zlib1g-dev \
liblzma-dev \
ca-certificates \
ninja-build \
rsync \
libglib2.0-0 \
libgl1 \
libx11-6 \
libxext6 \
libxrender1 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxcb1 \
libxcb-render0 \
libxcb-shm0 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-shape0 \
libxcb-xfixes0 \
libxcb-xinerama0 \
libxcb-xinput0 \
libsm6 \
libice6 \
&& rm -rf /var/lib/apt/lists/*
# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
&& ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"
RUN uv python install ${PYTHON_VERSION} --default
COPY . .
RUN ./tools/build RUN ./tools/build
# Install pre-compiled Anki. # Install pre-compiled Anki.
FROM python:3.13-slim AS installer FROM python:${PYTHON_VERSION}-slim as installer
WORKDIR /opt/anki/ WORKDIR /opt/anki/
COPY --from=build /opt/anki/out/wheels/ wheels/ COPY --from=build /opt/anki/wheels/ wheels/
# Use virtual environment. # Use virtual environment.
RUN python -m venv venv \ RUN python -m venv venv \
&& ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \ && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
&& ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl
# We use another build stage here so we don't include the wheels in the final image. # We use another build stage here so we don't include the wheels in the final image.
FROM python:3.13-slim AS final FROM python:${PYTHON_VERSION}-slim as final
COPY --from=installer /opt/anki/venv /opt/anki/venv COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies. # Install run-time dependencies.
@ -102,9 +59,9 @@ RUN apt-get update \
libxrender1 \ libxrender1 \
libxtst6 \ libxtst6 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Add non-root user. # Add non-root user.
RUN useradd --create-home anki RUN useradd --create-home anki
USER anki USER anki
WORKDIR /work WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"] ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"


@ -1,84 +0,0 @@
Anki's codebase uses three layers.
1. The web frontend, created in Svelte and TypeScript,
2. The Python layer, and
3. The core Rust layer.
Each layer can make RPCs (Remote Procedure Calls) to the layers below it. While it should be avoided, Python can also invoke TypeScript functions. The Rust layer never calls the other layers. Note that it can make RPCs to AnkiWeb and other servers, which is out of scope for this document.
In this document we'll provide examples of bridges between languages, explaining:
- where the RPC is declared,
- where it is called (with the appropriate imports) and
- where it is implemented.
Imitating those examples should allow you to make calls and create new RPCs.
## Declaring RPCs
Let's consider the method `NewDeck` of `DecksService`. It's declared in [decks.proto](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/decks.proto#L14) as `rpc NewDeck(generic.Empty) returns (Deck);`. This means the method takes no arguments (technically, an argument containing no information), and returns a [`Deck`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/decks.proto#L54).
Read [protobuf](./protobuf.md) to learn more about how those input and output types are defined.
If the RPC implementation is in Python, it should be declared in `FrontendService` in [frontend.proto](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/frontend.proto#L24C3-L24C66). RPCs declared in any other service are implemented in Rust.
## Making a Remote Procedure Call
In this section we'll consider how to make Remote Procedure Calls (RPCs) from the languages used in Anki. The languages used for AnkiDroid and AnkiMobile are out of scope for this document.
### Making an RPC from Python
Python can invoke the `NewDeck` method with [`col._backend.new_deck()`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/pylib/anki/decks.py#L168). This Python method takes no arguments and returns a `Deck` value.
However, most Python code should not call this method directly. Instead it should call [`col.decks.new_deck()`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/pylib/anki/decks.py#L166). Generally speaking, all back-end functions called from Python should be called through a helper method defined in `pylib/anki/`. The `_backend` part is an implementation detail that most callers should ignore. This is especially important because add-ons should expect a relatively stable API independent of the implementation details of the RPC.
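As a rough sketch of the pattern (the collection path is a placeholder; the two calls are the helper and backend methods linked above):

```python
from anki.collection import Collection

col = Collection("/path/to/collection.anki2")  # placeholder path

# Preferred: go through the pylib helper, which wraps the backend RPC
# behind a relatively stable API.
deck = col.decks.new_deck()

# Discouraged: calling the generated backend method directly couples
# the caller to an implementation detail that may change.
deck = col._backend.new_deck()
```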
### Invoking a method from TypeScript
Let's consider the method [`rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata);`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/import_export.proto#L20) from `ImportExportService`.
It's used in the TypeScript class [`ImportCsvState`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/routes/import-csv/lib.ts#L102) as an asynchronous function. Its argument is a single JavaScript object, whose keys are as in [`CsvMetadataRequest`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/import_export.proto#L138), and it returns a `CsvMetadata`.
The method was imported with `import { getCsvMetadata } from "@generated/backend";` and the types were imported with `import type { CsvMetadata } from "@generated/anki/import_export_pb";`. Note that it was not necessary to import the input type, given that it's simply an untyped JavaScript object.
## Implementation
Let's now look at implementations of those RPCs.
### Implementation in Rust
The method `NewDeck` is implemented in Rust's [DecksService](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/rslib/src/decks/service.rs#L21) as `fn new_deck(&mut self) -> error::Result<anki_proto::decks::Deck>`. Note that the method name changes from Pascal case to snake case, and the RPC's argument of type `generic.Empty` is ignored.
### Implementation in Python
Let's consider the implementation of the method [DeckOptionsRequireClose](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/mediasrv.py#L578). It's defined as `def deck_options_require_close() -> bytes:`. In this case a value must be returned, but it will be ignored, so returning `b""` is perfectly fine.
Note that the incoming HTTP request is not processed on the main thread. In order to do any work with the GUI, we should call `aqt.mw.taskman.run_on_main`.
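As an illustration, a minimal sketch of such a handler (the GUI work inside `on_main` is hypothetical):

```python
import aqt

def deck_options_require_close() -> bytes:
    # The request arrives off the main thread, so any GUI work must be
    # scheduled onto the main thread.
    def on_main() -> None:
        aqt.mw.reset()  # hypothetical GUI work

    aqt.mw.taskman.run_on_main(on_main)
    # The return value is ignored by the caller, so an empty byte
    # string is fine.
    return b""
```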
## Invoking a TypeScript method from Python
This case should be avoided if possible, as we generally want to avoid
calls to the upper layers. Unlike the previous cases, we don't use
protobuf here.
### Calling a TS function
Let's take as an example [`export function getTypedAnswer(): string | null`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/reviewer/index.ts#L35). It's an exported function, and its return type can be encoded as JSON.
It's called in the Reviewer class through [`self.web.evalWithCallback("getTypedAnswer();", self._onTypedAnswer)`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/reviewer.py#L785). The result is then sent to [`_onTypedAnswer`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/reviewer.py#L787).
If no return value is needed, `web.eval` would have been sufficient.
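Putting the two variants side by side, a minimal sketch of the calling code (everything except `getTypedAnswer` and the two `web` methods is hypothetical):

```python
from typing import Optional

class ReviewerLike:
    def __init__(self, web) -> None:
        self.web = web  # an AnkiWebView-like object

    def _ask_for_typed_answer(self) -> None:
        # Evaluate the exported TS function, and route its JSON-decoded
        # result to a Python callback.
        self.web.evalWithCallback("getTypedAnswer();", self._onTypedAnswer)

    def _onTypedAnswer(self, val: Optional[str]) -> None:
        print("typed answer was", val)

    def _clear_flag(self) -> None:
        # When no return value is needed, eval is enough.
        self.web.eval("clearFlag();")  # hypothetical TS function
```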
### Calling a Svelte method
Let's now consider the case where the method we want to call is implemented in a Svelte library. Let's take as example [`deckOptionsPendingChanges`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/routes/deck-options/%5BdeckId%5D/%2Bpage.svelte#L17). We define it with:
```js
globalThis.anki ||= {};
globalThis.anki.methodName = async (): Promise<void> => { body };
```
Note that if the function is asynchronous, you can't directly send the
result to a callback. Instead, your function will have to call a post
method that sends the result to Python or Rust.
This method is called in [deckoptions.py](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/deckoptions.py#L68) with `self.web.eval("anki.deckOptionsPendingChanges();")`.


@ -8,16 +8,24 @@ mentioned there no longer apply:
https://forums.ankiweb.net/t/guide-how-to-build-and-run-anki-from-source-with-xubuntu-20-04/12865 https://forums.ankiweb.net/t/guide-how-to-build-and-run-anki-from-source-with-xubuntu-20-04/12865
You can see a full list of buildtime and runtime requirements by looking at the You can see a full list of buildtime and runtime requirements by looking at the
[Dockerfile](../.buildkite/linux/docker/Dockerfile) used to build the [Dockerfiles](../.buildkite/linux/docker/Dockerfile.amd64) used to build the
official releases. official releases.
Glibc is required - if you are on a distro like Alpine that uses musl, things
may not work.
Users on ARM64, see the notes at the bottom of this file before proceeding.
**Ensure some basic tools are installed**: **Ensure some basic tools are installed**:
``` ```
$ sudo apt install bash grep findutils curl gcc g++ make git rsync $ sudo apt install bash grep findutils curl gcc g++ make git rsync ninja-build
``` ```
- The 'find' utility is 'findutils' on Debian. - The 'find' utility is 'findutils' on Debian.
- Your distro may call the package 'ninja' instead of 'ninja-build', or it
may not have a version new enough - if so, install from the zip mentioned in
development.md.
## Missing Libraries ## Missing Libraries
@ -45,17 +53,15 @@ error while loading shared libraries: libcrypt.so.1: cannot open shared object f
To play and record audio during development, install mpv and lame. To play and record audio during development, install mpv and lame.
## Glibc and Qt ## ARM64 support
Anki requires a recent glibc. Other platforms download PyQt binary wheels from PyPI. There are no PyQt wheels available
for ARM Linux, so you will need to rely on your system-provided libraries instead. Your distro
will need to have Python 3.9 or later.
If you are using a distro that uses musl, Anki will not work. After installing the system libraries (eg 'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv'),
You can use your system's Qt libraries if they are Qt 6.2 or later, if
you wish. After installing the system libraries (eg:
'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv pyqt6-dev-tools'),
find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll
also need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where
the packages can be found: the packages can be found:
``` ```
@ -63,6 +69,13 @@ export PYTHONPATH=/usr/lib/python3/dist-packages
export PYTHON_BINARY=/usr/bin/python3 export PYTHON_BINARY=/usr/bin/python3
``` ```
There are a few things to be aware of:
- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5.
- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work
on Qt6 environments.
- Some of the './ninja check' tests are broken on ARM Linux.
## Packaging considerations ## Packaging considerations
Python, node and protoc are downloaded as part of the build. You can optionally define Python, node and protoc are downloaded as part of the build. You can optionally define

View file

@ -1,9 +1,3 @@
ProtoBuf is a format used both to save data in storage and to transmit
data between services. You can think of it as similar to JSON with
schemas, in that you can use basic types, lists and records, except
that it's usually transmitted and stored in an efficient binary form
rather than in a human-readable one.
# Protocol Buffers # Protocol Buffers
Anki uses [different implementations of Protocol Buffers](./architecture.md#protobuf) Anki uses [different implementations of Protocol Buffers](./architecture.md#protobuf)
@ -98,6 +92,12 @@ should preferably be assigned a number between 1 and 15. If a message contains
Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated). Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated).
- Every message used in aqt or pylib must be added to the respective `.pylintrc`
  to avoid failing type checks. The protobuf message's unqualified name must be
  used, not an alias from `collection.py`, for example. Take this into account
  when choosing a message name, to avoid skipping the typechecking of a Python
  class with the same name.
### Typescript ### Typescript
Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers


@ -1,40 +1,32 @@
FROM rust:1.85.0-alpine3.20 AS builder FROM rust:1.76-alpine3.19 AS builder
ARG ANKI_VERSION ARG ANKI_VERSION
RUN apk update && apk add --no-cache build-base protobuf && rm -rf /var/cache/apk/* RUN apk update && apk add --no-cache build-base protobuf && rm -rf /var/cache/apk/*
RUN cargo install --git https://github.com/ankitects/anki.git \ RUN cargo install --git https://github.com/ankitects/anki.git \
--tag ${ANKI_VERSION} \ --tag ${ANKI_VERSION} \
--root /anki-server \ --root /anki-server \
--locked \ anki-sync-server
anki-sync-server
FROM alpine:3.21.0 FROM alpine:3.19.1
# Default PUID and PGID values (can be overridden at runtime). Use these to RUN adduser -D -h /home/anki anki
# ensure the files on the volume have the permissions you need.
ENV PUID=1000
ENV PGID=1000
COPY --from=builder /anki-server/bin/anki-sync-server /usr/local/bin/anki-sync-server COPY --from=builder /anki-server/bin/anki-sync-server /usr/local/bin/anki-sync-server
RUN apk update && apk add --no-cache bash su-exec && rm -rf /var/cache/apk/*
RUN apk update && apk add --no-cache bash && rm -rf /var/cache/apk/*
EXPOSE 8080 USER anki
COPY entrypoint.sh /entrypoint.sh ENV SYNC_PORT=${SYNC_PORT:-"8080"}
RUN chmod +x /entrypoint.sh
EXPOSE ${SYNC_PORT}
ENTRYPOINT ["/entrypoint.sh"]
CMD ["anki-sync-server"] CMD ["anki-sync-server"]
# This health check will work for Anki versions 24.08.x and newer. # TODO - consider exposing endpoint /health to check on health, because currently it returns a 404 error
# For older versions, it may incorrectly report an unhealthy status, which should not be the case. # HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ # CMD wget -qO- http://localhost:${SYNC_PORT} || exit 1
CMD wget -qO- http://127.0.0.1:8080/health || exit 1
VOLUME /anki_data LABEL maintainer="Jean Khawand <jk@jeankhawand.com>"
LABEL maintainer="Jean Khawand <jk@jeankhawand.com>"


@ -1,33 +0,0 @@
FROM rust:1.85.0 AS builder
ARG ANKI_VERSION
RUN apt-get update && apt-get install -y build-essential protobuf-compiler && apt-get clean && rm -rf /var/lib/apt/lists/*
RUN cargo install --git https://github.com/ankitects/anki.git \
--tag ${ANKI_VERSION} \
--root /anki-server \
--locked \
anki-sync-server
FROM gcr.io/distroless/cc-debian12
COPY --from=builder /anki-server/bin/anki-sync-server /usr/bin/anki-sync-server
# Note that as a user of the container you should NOT overwrite these values
# for safety and simplicity reasons
ENV SYNC_PORT=8080
ENV SYNC_BASE=/anki_data
EXPOSE ${SYNC_PORT}
CMD ["anki-sync-server"]
# This health check will work for Anki versions 24.08.x and newer.
# For older versions, it may incorrectly report an unhealthy status, which should not be the case.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD ["anki-sync-server", "--healthcheck"]
VOLUME /anki_data
LABEL maintainer="Jean Khawand <jk@jeankhawand.com>"


@ -10,27 +10,13 @@ the build products and runtime dependencies from the rest of your system.
- [x] [Docker](https://docs.docker.com/get-started/) - [x] [Docker](https://docs.docker.com/get-started/)
| **Aspect** | **Dockerfile** | **Dockerfile.distroless** |
| ---------------------- | ---------------------------------------------------------- | --------------------------------------------------------- |
| **Shell & Tools** | ✅ Includes shell and tools | ❌ Minimal, no shell or tools |
| **Debugging** | ✅ Easier debugging with shell and tools | ❌ Harder to debug due to minimal environment |
| **Health Checks** | ✅ Supports complex health checks | ❌ Health checks need to be simple or directly executable |
| **Image Size** | ❌ Larger image size | ✅ Smaller image size |
| **Customization** | ✅ Easier to customize with additional packages | ❌ Limited customization options |
| **Attack Surface** | ❌ Larger attack surface due to more installed packages | ✅ Reduced attack surface |
| **Libraries** | ✅ More libraries available | ❌ Limited libraries |
| **Start-up Time** | ❌ Slower start-up time due to larger image size | ✅ Faster start-up time |
| **Tool Compatibility** | ✅ Compatible with more tools and libraries | ❌ Compatibility limitations with certain tools |
| **Maintenance** | ❌ Higher maintenance due to larger image and dependencies | ✅ Lower maintenance with minimal base image |
| **Custom uid/gid** | ✅ It's possible to pass in PUID and PGID | ❌ PUID and PGID are not supported |
# Building image # Building image
To proceed with building, you must specify the Anki version you want, by replacing `<version>` with something like `24.11` and `<Dockerfile>` with the chosen Dockerfile (e.g., `Dockerfile` or `Dockerfile.distroless`). To proceed with building, you must specify the Anki version you want, by replacing `<version>` with something like `23.12.1`.
```bash ```bash
# Execute this command from this directory # Ensure you are running this command inside /docs/syncserver
docker build -f <Dockerfile> --no-cache --build-arg ANKI_VERSION=<version> -t anki-sync-server . docker build --no-cache --build-arg ANKI_VERSION=<version> -t anki-sync-server .
``` ```
# Run container # Run container
@ -39,57 +25,14 @@ Once done with build, you can proceed with running this image with the following
```bash ```bash
# this will create anki server # this will create anki server
docker run -d \ docker run -d -e "SYNC_USER1=admin:admin" -p 8080:8080 --name anki-sync-server anki-sync-server
-e "SYNC_USER1=admin:admin" \
-p 8080:8080 \
--mount type=volume,src=anki-sync-server-data,dst=/anki_data \
--name anki-sync-server \
anki-sync-server
``` ```
If the image you are using was built with `Dockerfile` you can specify the However, if you want to have multiple users, you have to use the following approach:
`PUID` and `PGID` env variables for the user and group id of the process that
will run the anki-sync-server process. This is valuable when you want the files
written and read from the `/anki_data` volume to belong to a particular
user/group e.g. to access it from the host or another container. Note the the
ids chosen for `PUID` and `PGID` must not already be in use inside the
container (1000 and above is fine). For example add `-e "PUID=1050"` and `-e
"PGID=1050"` to the above command.
If you want to have multiple Anki users that can sync their devices, you can
specify multiple `SYNC_USER` variables as follows:
```bash ```bash
# this will create anki server with multiple users # this will create anki server with multiple users
docker run -d \ docker run -d -e "SYNC_USER1=test:test" -e "SYNC_USER2=test2:test2" -p 8080:8080 --name anki-sync-server anki-sync-server
-e "SYNC_USER1=admin:admin" \
-e "SYNC_USER2=admin2:admin2" \
-p 8080:8080 \
--mount type=volume,src=anki-sync-server-data,dst=/anki_data \
--name anki-sync-server \
anki-sync-server
``` ```
Moreover, you can pass additional env vars mentioned Moreover, you can pass additional env vars mentioned [here](https://docs.ankiweb.net/sync-server.html)
[here](https://docs.ankiweb.net/sync-server.html). Note that `SYNC_BASE` and
`SYNC_PORT` will be ignored: the former for safety, to avoid accidentally
placing data outside the volume, and the latter for simplicity, since the
container's internal port does not matter when you can remap the external one.
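To listen on a different host port, remap only the external half of `-p`; the
internal `8080` stays fixed. For example (the host port `27701` is an arbitrary
choice):

```bash
# Expose the server on host port 27701 while the container keeps using 8080
docker run -d \
    -e "SYNC_USER1=admin:admin" \
    -p 27701:8080 \
    --mount type=volume,src=anki-sync-server-data,dst=/anki_data \
    --name anki-sync-server \
    anki-sync-server
```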
# Upgrading
If your image was built after January 2025, you can simply build a new image
and start a new container with the same configuration as the previous one.
Everything should work as expected.
If the image you were running was built **before January 2025** then it did not
contain a volume, meaning all syncserver data was stored inside the container.
If you discard the container, for example because you want to build a new
container using an updated image, then your syncserver data will be lost.
The easiest workaround is to make sure at least one of your devices is fully
in sync with your syncserver before upgrading the Docker container. After
upgrading, the next sync from that device will report that the server has no
data, and you will be given the option of uploading all local data from the
device to the syncserver.
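As a rough sketch, upgrading a post-January-2025 container then boils down to
replacing the container while reusing the named volume (container, image and
volume names as in the earlier examples; substitute `<version>` as before):

```bash
# Stop and remove the old container; the named volume keeps the data
docker stop anki-sync-server && docker rm anki-sync-server
# Rebuild the image for the new version and start a fresh container
docker build -f Dockerfile --no-cache --build-arg ANKI_VERSION=<version> -t anki-sync-server .
docker run -d \
    -e "SYNC_USER1=admin:admin" \
    -p 8080:8080 \
    --mount type=volume,src=anki-sync-server-data,dst=/anki_data \
    --name anki-sync-server \
    anki-sync-server
```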


@ -1,30 +0,0 @@
#!/bin/sh
set -o errexit
set -o nounset
set -o pipefail
# Default PUID and PGID if not provided
export PUID=${PUID:-1000}
export PGID=${PGID:-1000}
# These values are fixed and cannot be overwritten from the outside for
# convenience and safety reasons
export SYNC_PORT=8080
export SYNC_BASE=/anki_data
# Check if group exists, create if not
if ! getent group anki-group > /dev/null 2>&1; then
addgroup -g "$PGID" anki-group
fi
# Check if user exists, create if not
if ! id -u anki > /dev/null 2>&1; then
adduser -D -H -u "$PUID" -G anki-group anki
fi
# Fix ownership of mounted volumes
mkdir -p /anki_data
chown anki:anki-group /anki_data
# Run the provided command as the `anki` user
exec su-exec anki "$@"


@ -9,12 +9,7 @@ You must be running 64 bit Windows 10, version 1703 or newer.
**Rustup**: **Rustup**:
As mentioned in development.md, rustup must be installed. If you're on As mentioned in development.md, rustup must be installed. If you're on
ARM Windows and install the ARM64 version of rustup, from this project folder, ARM Windows, you must set the default target to x86_64-pc-windows-msvc.
run
```
rustup target add x86_64-pc-windows-msvc
```
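To confirm the target was added, you can optionally list the installed targets:

```
rustup target list --installed
```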
**Visual Studio**: **Visual Studio**:


@ -16,6 +16,7 @@ camino.workspace = true
clap.workspace = true clap.workspace = true
fluent-syntax.workspace = true fluent-syntax.workspace = true
itertools.workspace = true itertools.workspace = true
lazy_static.workspace = true
regex.workspace = true regex.workspace = true
serde_json.workspace = true serde_json.workspace = true
snafu.workspace = true snafu.workspace = true

@ -1 +1 @@
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba Subproject commit 092fcb838a1f2065cf71d2314d33cb82ff61cc1e


@ -1,17 +1,5 @@
actions-add = Add actions-add = Add
# Action in context menu:
# In the browser sidebar, when in "Select" mode, right-click on the
# selected criteria elements. In the context menu, click on "Search" to open
# a submenu. This entry in the submenu creates a search term that matches
# cards/notes meeting ALL of the selected criteria.
# https://github.com/ankitects/anki/pull/1044
actions-all-selected = All selected actions-all-selected = All selected
# Action in context menu:
# In the browser sidebar, when in "Select" mode, right-click on the
# selected criteria elements. In the context menu, click on "Search" to open
# a submenu. This entry in the submenu creates a search term that matches
# cards/notes meeting ANY of the selected criteria.
# https://github.com/ankitects/anki/pull/1044
actions-any-selected = Any selected actions-any-selected = Any selected
actions-cancel = Cancel actions-cancel = Cancel
actions-choose = Choose actions-choose = Choose
@ -24,7 +12,6 @@ actions-decks = Decks
actions-decrement-value = Decrement value actions-decrement-value = Decrement value
actions-delete = Delete actions-delete = Delete
actions-export = Export actions-export = Export
actions-empty-cards = Empty Cards
actions-filter = Filter actions-filter = Filter
actions-help = Help actions-help = Help
actions-increment-value = Increment value actions-increment-value = Increment value
@ -50,8 +37,6 @@ actions-select = Select
actions-shortcut-key = Shortcut key: { $val } actions-shortcut-key = Shortcut key: { $val }
actions-suspend-card = Suspend Card actions-suspend-card = Suspend Card
actions-set-due-date = Set Due Date actions-set-due-date = Set Due Date
actions-toggle-load-balancer = Toggle Load Balancer
actions-grade-now = Grade Now
actions-answer-card = Answer Card actions-answer-card = Answer Card
actions-unbury-unsuspend = Unbury/Unsuspend actions-unbury-unsuspend = Unbury/Unsuspend
actions-add-deck = Add Deck actions-add-deck = Add Deck
@ -62,9 +47,9 @@ actions-update-card = Update Card
actions-update-deck = Update Deck actions-update-deck = Update Deck
actions-forget-card = Reset Card actions-forget-card = Reset Card
actions-build-filtered-deck = Build Deck actions-build-filtered-deck = Build Deck
actions-add-notetype = Add Note Type actions-add-notetype = Add Notetype
actions-remove-notetype = Remove Note Type actions-remove-notetype = Remove Notetype
actions-update-notetype = Update Note Type actions-update-notetype = Update Notetype
actions-update-config = Update Config actions-update-config = Update Config
actions-card-info = Card Info actions-card-info = Card Info
actions-previous-card-info = Previous Card Info actions-previous-card-info = Previous Card Info
@ -72,12 +57,10 @@ actions-previous-card-info = Previous Card Info
# input is required before it can be performed. E.g. "Export..." vs. "Delete". # input is required before it can be performed. E.g. "Export..." vs. "Delete".
actions-with-ellipsis = { $action }... actions-with-ellipsis = { $action }...
actions-fullscreen-unsupported = Full screen mode is not supported for your video driver. Try switching to a different one from the preferences screen. actions-fullscreen-unsupported = Full screen mode is not supported for your video driver. Try switching to a different one from the preferences screen.
## Flags
actions-flag-number = Flag { $number } actions-flag-number = Flag { $number }
## The same translation may used for two independent actions:
## searching for cards with a flag of the specified color, and
## toggling the flag of the specified color on a card.
actions-flag-red = Red actions-flag-red = Red
actions-flag-orange = Orange actions-flag-orange = Orange
actions-flag-green = Green actions-flag-green = Green
@ -85,13 +68,9 @@ actions-flag-blue = Blue
actions-flag-pink = Pink actions-flag-pink = Pink
actions-flag-turquoise = Turquoise actions-flag-turquoise = Turquoise
actions-flag-purple = Purple actions-flag-purple = Purple
##
actions-set-flag = Set Flag actions-set-flag = Set Flag
actions-nothing-to-undo = Nothing to undo actions-nothing-to-undo = Nothing to undo
actions-nothing-to-redo = Nothing to redo actions-nothing-to-redo = Nothing to redo
actions-auto-advance = Auto Advance actions-auto-advance = Auto Advance
actions-auto-advance-activated = Auto Advance enabled actions-auto-advance-activated = Auto Advance enabled
actions-auto-advance-deactivated = Auto Advance disabled actions-auto-advance-deactivated = Auto Advance disabled
actions-processing = Processing...


@ -7,6 +7,6 @@ adding-history = History
adding-note-deleted = (Note deleted) adding-note-deleted = (Note deleted)
adding-shortcut = Shortcut: { $val } adding-shortcut = Shortcut: { $val }
adding-the-first-field-is-empty = The first field is empty. adding-the-first-field-is-empty = The first field is empty.
adding-you-have-a-cloze-deletion-note = You have a cloze note type but have not made any cloze deletions. Proceed? adding-you-have-a-cloze-deletion-note = You have a cloze notetype but have not made any cloze deletions. Proceed?
adding-cloze-outside-cloze-notetype = Cloze deletion can only be used on cloze note types. adding-cloze-outside-cloze-notetype = Cloze deletion can only be used on cloze notetypes.
adding-cloze-outside-cloze-field = Cloze deletion can only be used in fields which use the 'cloze:' filter. This is typically the first field. adding-cloze-outside-cloze-field = Cloze deletion can only be used in fields which use the 'cloze:' filter. This is typically the first field.


@ -28,9 +28,8 @@ browsing-cards-deleted-with-deckname =
browsing-change-deck = Change Deck browsing-change-deck = Change Deck
browsing-change-deck2 = Change Deck... browsing-change-deck2 = Change Deck...
browsing-change-note-type = Change Note Type browsing-change-note-type = Change Note Type
# Action in a context menu (right mouse-click on a card type)
browsing-change-note-type2 = Change Note Type... browsing-change-note-type2 = Change Note Type...
browsing-change-notetype = Change Note Type browsing-change-notetype = Change Notetype
browsing-clear-unused-tags = Clear Unused Tags browsing-clear-unused-tags = Clear Unused Tags
browsing-confirm-saved-search-overwrite = A saved search with the name { $name } already exists. Do you want to overwrite it? browsing-confirm-saved-search-overwrite = A saved search with the name { $name } already exists. Do you want to overwrite it?
browsing-created = Created browsing-created = Created
@ -83,7 +82,6 @@ browsing-reschedule = Reschedule
browsing-search-bar-hint = Search cards/notes (type text, then press Enter) browsing-search-bar-hint = Search cards/notes (type text, then press Enter)
browsing-search-in = Search in: browsing-search-in = Search in:
browsing-search-within-formatting-slow = Search within formatting (slow) browsing-search-within-formatting-slow = Search within formatting (slow)
browsing-select-deck = Select Deck
browsing-selected-notes-only = Selected notes only browsing-selected-notes-only = Selected notes only
browsing-shift-position-of-existing-cards = Shift position of existing cards browsing-shift-position-of-existing-cards = Shift position of existing cards
browsing-sidebar = Sidebar browsing-sidebar = Sidebar
@ -143,7 +141,7 @@ browsing-tooltip-card-modified = The last time changes were made to a card, incl
browsing-tooltip-note-modified = The last time changes were made to a note, usually field content or tag edits browsing-tooltip-note-modified = The last time changes were made to a note, usually field content or tag edits
browsing-tooltip-card = The name of a card's card template browsing-tooltip-card = The name of a card's card template
browsing-tooltip-cards = The number of cards a note has browsing-tooltip-cards = The number of cards a note has
browsing-tooltip-notetype = The name of a note's note type browsing-tooltip-notetype = The name of a note's notetype
browsing-tooltip-question = The front side of a card, customisable in the card template editor browsing-tooltip-question = The front side of a card, customisable in the card template editor
browsing-tooltip-answer = The back side of a card, customisable in the card template editor browsing-tooltip-answer = The back side of a card, customisable in the card template editor
browsing-studied-today = Studied browsing-studied-today = Studied


@ -23,25 +23,13 @@ card-stats-review-log-type-review = Review
card-stats-review-log-type-relearn = Relearn card-stats-review-log-type-relearn = Relearn
card-stats-review-log-type-filtered = Filtered card-stats-review-log-type-filtered = Filtered
card-stats-review-log-type-manual = Manual card-stats-review-log-type-manual = Manual
card-stats-review-log-type-rescheduled = Rescheduled
card-stats-review-log-elapsed-time = Elapsed Time
card-stats-no-card = (No card to display.) card-stats-no-card = (No card to display.)
card-stats-custom-data = Custom Data card-stats-custom-data = Custom Data
card-stats-fsrs-stability = Stability card-stats-fsrs-stability = Stability
card-stats-fsrs-difficulty = Difficulty card-stats-fsrs-difficulty = Difficulty
card-stats-fsrs-retrievability = Retrievability card-stats-fsrs-retrievability = Retrievability
card-stats-fsrs-forgetting-curve-title = Forgetting Curve
card-stats-fsrs-forgetting-curve-first-week = First Week
card-stats-fsrs-forgetting-curve-first-month = First Month
card-stats-fsrs-forgetting-curve-first-year = First Year
card-stats-fsrs-forgetting-curve-all-time = All Time
card-stats-fsrs-forgetting-curve-desired-retention = Desired Retention
## Window Titles ## Window Titles
card-stats-current-card = Current Card ({ $context }) card-stats-current-card = Current Card ({ $context })
card-stats-previous-card = Previous Card ({ $context }) card-stats-previous-card = Previous Card ({ $context })
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
card-stats-fsrs-forgetting-curve-probability-of-recalling = Probability of Recall


@ -20,13 +20,12 @@ card-templates-night-mode = Night Mode
# on a mobile device. # on a mobile device.
card-templates-add-mobile-class = Add Mobile Class card-templates-add-mobile-class = Add Mobile Class
card-templates-preview-settings = Options card-templates-preview-settings = Options
card-templates-invalid-template-number = Card template { $number } in note type '{ $notetype }' has a problem. card-templates-invalid-template-number = Card template { $number } in notetype '{ $notetype }' has a problem.
card-templates-identical-front = The front side is identical to card template { $number }. card-templates-identical-front = The front side is identical to card template { $number }.
card-templates-no-front-field = Expected to find a field replacement on the front of the card template. card-templates-no-front-field = Expected to find a field replacement on the front of the card template.
card-templates-missing-cloze = Expected to find '{ "{{" }cloze:Text{ "}}" }' or similar on the front and back of the card template. card-templates-missing-cloze = Expected to find '{ "{{" }cloze:Text{ "}}" }' or similar on the front and back of the card template.
card-templates-extraneous-cloze = 'cloze:' can only be used on cloze note types. card-templates-extraneous-cloze = 'cloze:' can only be used on cloze notetypes.
card-templates-see-preview = See the preview for more information. card-templates-see-preview = See the preview for more information.
card-templates-field-not-found = Field '{ $field }' not found.
card-templates-changes-saved = Changes saved. card-templates-changes-saved = Changes saved.
card-templates-discard-changes = Discard changes? card-templates-discard-changes = Discard changes?
card-templates-add-card-type = Add Card Type... card-templates-add-card-type = Add Card Type...
@ -37,7 +36,6 @@ card-templates-card = Card { $val }
card-templates-card-types-for = Card Types for { $val } card-templates-card-types-for = Card Types for { $val }
card-templates-cloze = Cloze { $val } card-templates-cloze = Cloze { $val }
card-templates-deck-override = Deck Override... card-templates-deck-override = Deck Override...
card-templates-copy-info = Copy Info to Clipboard
card-templates-delete-the-as-card-type-and = Delete the '{ $template }' card type, and its { $cards }? card-templates-delete-the-as-card-type-and = Delete the '{ $template }' card type, and its { $cards }?
card-templates-enter-deck-to-place-new = Enter deck to place new { $val } cards in, or leave blank: card-templates-enter-deck-to-place-new = Enter deck to place new { $val } cards in, or leave blank:
card-templates-enter-new-card-position-1 = Enter new card position (1...{ $val }): card-templates-enter-new-card-position-1 = Enter new card position (1...{ $val }):
@ -60,6 +58,7 @@ card-templates-this-will-create-card-proceed =
} }
card-templates-type-boxes-warning = Only one typing box per card template is supported. card-templates-type-boxes-warning = Only one typing box per card template is supported.
card-templates-restore-to-default = Restore to Default card-templates-restore-to-default = Restore to Default
card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed? card-templates-restore-to-default-confirmation = This will reset all fields and templates in this notetype to their default
card-templates-restored-to-default = Note type has been restored to its original state. values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
card-templates-restored-to-default = Notetype has been restored to its original state.


@ -8,7 +8,7 @@ change-notetype-will-discard-cards = Will remove the following cards:
change-notetype-fields = Fields change-notetype-fields = Fields
change-notetype-templates = Templates change-notetype-templates = Templates
change-notetype-to-from-cloze = change-notetype-to-from-cloze =
When changing to or from a Cloze note type, card numbers remain unchanged. When changing to or from a Cloze notetype, card numbers remain unchanged.
If changing to a regular note type, and there are more cloze deletions If changing to a regular notetype, and there are more cloze deletions
than available card templates, any extra cards will be removed. than available card templates, any extra cards will be removed.


@ -5,11 +5,6 @@ database-check-card-properties =
[one] Fixed { $count } invalid card property. [one] Fixed { $count } invalid card property.
*[other] Fixed { $count } invalid card properties. *[other] Fixed { $count } invalid card properties.
} }
database-check-card-last-review-time-empty =
{ $count ->
[one] Added last review time to { $count } card.
*[other] Added last review time to { $count } cards.
}
database-check-missing-templates = database-check-missing-templates =
{ $count -> { $count ->
[one] Deleted { $count } card with missing template. [one] Deleted { $count } card with missing template.
@ -56,7 +51,7 @@ database-check-fixed-invalid-ids =
*[other] Fixed { $count } objects with timestamps in the future. *[other] Fixed { $count } objects with timestamps in the future.
} }
# "db-check" is always in English # "db-check" is always in English
database-check-notetypes-recovered = One or more note types were missing. The notes that used them have been given new note types starting with "db-check", but field names and card design have been lost, so you may be better off restoring from an automatic backup. database-check-notetypes-recovered = One or more notetypes were missing. The notes that used them have been given new notetypes starting with "db-check", but field names and card design have been lost, so you may be better off restoring from an automatic backup.
## Progress info ## Progress info


@ -24,7 +24,7 @@ deck-config-review-limit-tooltip =
if cards are ready for review. if cards are ready for review.
deck-config-limit-deck-v3 = deck-config-limit-deck-v3 =
When studying a deck that has subdecks inside it, the limits set on each When studying a deck that has subdecks inside it, the limits set on each
subdeck control the maximum number of cards gathered from that particular deck. subdeck control the maximum number of cards drawn from that particular deck.
The selected deck's limits control the total cards that will be shown. The selected deck's limits control the total cards that will be shown.
deck-config-limit-new-bound-by-reviews = deck-config-limit-new-bound-by-reviews =
The review limit affects the new limit. For example, if your review limit is The review limit affects the new limit. For example, if your review limit is
@ -33,9 +33,9 @@ deck-config-limit-new-bound-by-reviews =
shown. shown.
deck-config-limit-interday-bound-by-reviews = deck-config-limit-interday-bound-by-reviews =
The review limit also affects interday learning cards. When applying the limit, The review limit also affects interday learning cards. When applying the limit,
interday learning cards are gathered first, then review cards. interday learning cards are fetched first, then reviews.
deck-config-tab-description = deck-config-tab-description =
- `Preset`: The limit applies to all decks using this preset. - `Preset`: The limit is shared with all decks using this preset.
- `This deck`: The limit is specific to this deck. - `This deck`: The limit is specific to this deck.
- `Today only`: Make a temporary change to this deck's limit. - `Today only`: Make a temporary change to this deck's limit.
deck-config-new-cards-ignore-review-limit = New cards ignore review limit deck-config-new-cards-ignore-review-limit = New cards ignore review limit
@ -45,10 +45,9 @@ deck-config-new-cards-ignore-review-limit-tooltip =
will be shown regardless of the review limit. will be shown regardless of the review limit.
deck-config-apply-all-parent-limits = Limits start from top deck-config-apply-all-parent-limits = Limits start from top
deck-config-apply-all-parent-limits-tooltip = deck-config-apply-all-parent-limits-tooltip =
By default, the daily limits of a higher-level deck do not apply if you're studying from its subdeck. By default, limits start from the deck you select. If this option is enabled, the limits will
If this option is enabled, the limits will
start from the top-level deck instead, which can be useful if you wish to study individual start from the top-level deck instead, which can be useful if you wish to study individual
subdecks, while enforcing a total limit on cards for the deck tree. sub-decks, while enforcing a total limit on cards/day.
deck-config-affects-entire-collection = Affects the entire collection. deck-config-affects-entire-collection = Affects the entire collection.
## Daily limit tabs: please try to keep these as short as the English version, ## Daily limit tabs: please try to keep these as short as the English version,
@ -62,7 +61,7 @@ deck-config-today-only = Today only
deck-config-learning-steps = Learning steps deck-config-learning-steps = Learning steps
# Please don't translate `1m`, `2d` # Please don't translate `1m`, `2d`
-deck-config-delay-hint = Delays are typically minutes (e.g. `1m`) or days (e.g. `2d`), but hours (e.g. `1h`) and seconds (e.g. `30s`) are also supported. -deck-config-delay-hint = Delays are typically minutes (eg `1m`) or days (eg `2d`), but hours (eg `1h`) and seconds (eg `30s`) are also supported.
deck-config-learning-steps-tooltip = deck-config-learning-steps-tooltip =
One or more delays, separated by spaces. The first delay will be used One or more delays, separated by spaces. The first delay will be used
when you press the `Again` button on a new card, and is 1 minute by default. when you press the `Again` button on a new card, and is 1 minute by default.
@ -83,7 +82,7 @@ deck-config-new-insertion-order-tooltip =
deck-config-new-insertion-order-sequential = Sequential (oldest cards first) deck-config-new-insertion-order-sequential = Sequential (oldest cards first)
deck-config-new-insertion-order-random = Random deck-config-new-insertion-order-random = Random
deck-config-new-insertion-order-random-with-v3 = deck-config-new-insertion-order-random-with-v3 =
With the v3 scheduler, it is better to leave this set to sequential, and With the V3 scheduler, it is better to leave this set to sequential, and
adjust the new card gather order instead. adjust the new card gather order instead.
## Lapses section ## Lapses section
@ -101,7 +100,7 @@ deck-config-leech-threshold-tooltip =
think of a mnemonic to help you remember it. think of a mnemonic to help you remember it.
# See actions-suspend-card and scheduling-tag-only for the wording # See actions-suspend-card and scheduling-tag-only for the wording
deck-config-leech-action-tooltip = deck-config-leech-action-tooltip =
`Tag Only`: Add a 'leech' tag to the note, and display a pop-up. `Tag Only`: Add a "leech" tag to the note, and display a pop-up.
`Suspend Card`: In addition to tagging the note, hide the card until it is `Suspend Card`: In addition to tagging the note, hide the card until it is
manually unsuspended. manually unsuspended.
@ -113,7 +112,7 @@ deck-config-bury-new-siblings = Bury new siblings
deck-config-bury-review-siblings = Bury review siblings deck-config-bury-review-siblings = Bury review siblings
deck-config-bury-interday-learning-siblings = Bury interday learning siblings deck-config-bury-interday-learning-siblings = Bury interday learning siblings
deck-config-bury-new-tooltip = deck-config-bury-new-tooltip =
Whether other `new` cards of the same note (e.g. reverse cards, adjacent cloze deletions) Whether other `new` cards of the same note (eg reverse cards, adjacent cloze deletions)
will be delayed until the next day. will be delayed until the next day.
deck-config-bury-review-tooltip = Whether other `review` cards of the same note will be delayed until the next day. deck-config-bury-review-tooltip = Whether other `review` cards of the same note will be delayed until the next day.
deck-config-bury-interday-learning-tooltip = deck-config-bury-interday-learning-tooltip =
@ -121,7 +120,7 @@ deck-config-bury-interday-learning-tooltip =
will be delayed until the next day. will be delayed until the next day.
deck-config-bury-priority-tooltip = deck-config-bury-priority-tooltip =
When Anki gathers cards, it first gathers intraday learning cards, then When Anki gathers cards, it first gathers intraday learning cards, then
interday learning cards, then review cards, and finally new cards. This affects interday learning cards, then reviews, and finally new cards. This affects
how burying works: how burying works:
- If you have all burying options enabled, the sibling that comes earliest in - If you have all burying options enabled, the sibling that comes earliest in
@ -132,44 +131,57 @@ deck-config-bury-priority-tooltip =
learning or review cards, and you may see both a review sibling and new sibling in the learning or review cards, and you may see both a review sibling and new sibling in the
same session. same session.
## Gather order and sort order of cards ## Ordering section
deck-config-ordering-title = Display Order deck-config-ordering-title = Display Order
deck-config-new-gather-priority = New card gather order deck-config-new-gather-priority = New card gather order
deck-config-new-gather-priority-tooltip-2 = deck-config-new-gather-priority-tooltip-2 =
`Deck`: Gathers cards from each subdeck in order, starting from the top. Cards from each subdeck are `Deck`: gathers cards from each deck in order, starting from the top. Cards from each deck are
gathered in ascending position. If the daily limit of the selected deck is reached, gathering gathered in ascending position. If the daily limit of the selected deck is reached, gathering
can stop before all subdecks have been checked. This order is fastest in large collections, and may stop before all decks have been checked. This order is fastest in large collections, and
allows you to prioritize subdecks that are closer to the top. allows you to prioritize subdecks that are closer to the top.
`Ascending position`: Gathers cards by ascending position (due #), which is typically `Ascending position`: gathers cards by ascending position (due #), which is typically
the oldest-added first. the oldest-added first.
`Descending position`: Gathers cards by descending position (due #), which is typically `Descending position`: gathers cards by descending position (due #), which is typically
the latest-added first. the latest-added first.
`Random notes`: Picks notes at random, then gathers all of their cards. `Random notes`: gathers cards of randomly selected notes. When sibling burying is
disabled, this allows all cards of a note to be seen in a session (eg. both a front->back
and back->front card)
`Random cards`: Gathers cards in a random order. `Random cards`: gathers cards completely randomly.
deck-config-new-gather-priority-deck = Deck
deck-config-new-gather-priority-deck-then-random-notes = Deck then random notes
deck-config-new-gather-priority-position-lowest-first = Ascending position
deck-config-new-gather-priority-position-highest-first = Descending position
deck-config-new-gather-priority-random-notes = Random notes
deck-config-new-gather-priority-random-cards = Random cards
deck-config-new-card-sort-order = New card sort order deck-config-new-card-sort-order = New card sort order
deck-config-new-card-sort-order-tooltip-2 = deck-config-new-card-sort-order-tooltip-2 =
`Card type, then order gathered`: Shows cards in order of card type number. `Card type`: Displays cards in order of card type number. If you have sibling burying
Cards of each card type number are shown in the order they were gathered. disabled, this will ensure all front→back cards are seen before any back→front cards.
If you have sibling burying disabled, this will ensure all front→back cards are seen before any back→front cards.
This is useful to have all cards of the same note shown in the same session, but not This is useful to have all cards of the same note shown in the same session, but not
too close to one another. too close to one another.
`Order gathered`: Shows cards exactly as they were gathered. If sibling burying is disabled, `Order gathered`: Shows cards exactly as they were gathered. If sibling burying is disabled,
this will typically result in all cards of a note being seen one after the other. this will typically result in all cards of a note being seen one after the other.
`Card type, then random`: Shows cards in order of card type number. Cards of each card `Card type, then random`: Like `Card type`, but shuffles the cards of each card
type number are shown in a random order. This order is useful if you don't want sibling cards type number. If you use `Ascending position` to gather the oldest cards, you could use
to appear too close to each other, but still want the cards to appear in a random order. this setting to see those cards in a random order, but still ensure cards of the same
note do not end up too close to one another.
`Random note, then card type`: Picks notes at random, then shows all of their cards `Random note, then card type`: Picks notes at random, then shows all of their siblings
in order. in order.
`Random`: Shows cards in a random order. `Random`: Fully shuffles the gathered cards.
deck-config-sort-order-card-template-then-random = Card type, then random
deck-config-sort-order-random-note-then-template = Random note, then card type
deck-config-sort-order-random = Random
deck-config-sort-order-template-then-gather = Card type
deck-config-sort-order-gather = Order gathered
deck-config-new-review-priority = New/review order deck-config-new-review-priority = New/review order
deck-config-new-review-priority-tooltip = When to show new cards in relation to review cards. deck-config-new-review-priority-tooltip = When to show new cards in relation to review cards.
deck-config-interday-step-priority = Interday learning/review order deck-config-interday-step-priority = Interday learning/review order
@ -177,8 +189,11 @@ deck-config-interday-step-priority-tooltip =
When to show (re)learning cards that cross a day boundary. When to show (re)learning cards that cross a day boundary.
The review limit is always applied first to interday learning cards, and The review limit is always applied first to interday learning cards, and
then review cards. This option will control the order the gathered cards are shown in, then reviews. This option will control the order the gathered cards are shown in,
but interday learning cards will always be gathered first. but interday learning cards will always be gathered first.
deck-config-review-mix-mix-with-reviews = Mix with reviews
deck-config-review-mix-show-after-reviews = Show after reviews
deck-config-review-mix-show-before-reviews = Show before reviews
deck-config-review-sort-order = Review sort order deck-config-review-sort-order = Review sort order
deck-config-review-sort-order-tooltip = deck-config-review-sort-order-tooltip =
The default order prioritizes cards that have been waiting longest, so that The default order prioritizes cards that have been waiting longest, so that
@ -186,84 +201,40 @@ deck-config-review-sort-order-tooltip =
first. If you have a large backlog that will take more than a few days to first. If you have a large backlog that will take more than a few days to
clear, or wish to see cards in subdeck order, you may find the alternate clear, or wish to see cards in subdeck order, you may find the alternate
sort orders preferable. sort orders preferable.
deck-config-sort-order-due-date-then-random = Due date, then random
deck-config-sort-order-due-date-then-deck = Due date, then deck
deck-config-sort-order-deck-then-due-date = Deck, then due date
deck-config-sort-order-ascending-intervals = Ascending intervals
deck-config-sort-order-descending-intervals = Descending intervals
deck-config-sort-order-ascending-ease = Ascending ease
deck-config-sort-order-descending-ease = Descending ease
deck-config-sort-order-ascending-difficulty = Ascending difficulty
deck-config-sort-order-descending-difficulty = Descending difficulty
deck-config-sort-order-relative-overdueness = Relative overdueness
deck-config-display-order-will-use-current-deck = deck-config-display-order-will-use-current-deck =
Anki will use the display order from the deck you Anki will use the display order from the deck you
select to study, and not any subdecks it may have. select to study, and not any subdecks it may have.
## Gather order and sort order of cards Combobox entries
# Gather new cards ordered by deck.
deck-config-new-gather-priority-deck = Deck
# Gather new cards ordered by deck, then ordered by random notes, ensuring all cards of the same note are grouped together.
deck-config-new-gather-priority-deck-then-random-notes = Deck, then random notes
# Gather new cards ordered by position number, ascending (lowest to highest).
deck-config-new-gather-priority-position-lowest-first = Ascending position
# Gather new cards ordered by position number, descending (highest to lowest).
deck-config-new-gather-priority-position-highest-first = Descending position
# Gather the cards ordered by random notes, ensuring all cards of the same note are grouped together.
deck-config-new-gather-priority-random-notes = Random notes
# Gather new cards randomly.
deck-config-new-gather-priority-random-cards = Random cards
# Sort the cards first by their type, in ascending order (alphabetically), then randomized within each type.
deck-config-sort-order-card-template-then-random = Card type, then random
# Sort the notes first randomly, then the cards by their type, in ascending order (alphabetically), within each note.
deck-config-sort-order-random-note-then-template = Random note, then card type
# Sort the cards randomly.
deck-config-sort-order-random = Random
# Sort the cards first by their type, in ascending order (alphabetically), then by the order they were gathered, in ascending order (oldest to newest).
deck-config-sort-order-template-then-gather = Card type, then order gathered
# Sort the cards by the order they were gathered, in ascending order (oldest to newest).
deck-config-sort-order-gather = Order gathered
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-mix-with-reviews = Mix with reviews
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-show-after-reviews = Show after reviews
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-show-before-reviews = Show before reviews
# Sort the cards first by due date, in ascending order (oldest due date to newest), then randomly within the same due date.
deck-config-sort-order-due-date-then-random = Due date, then random
# Sort the cards first by due date, in ascending order (oldest due date to newest), then by deck within the same due date.
deck-config-sort-order-due-date-then-deck = Due date, then deck
# Sort the cards first by deck, then by due date in ascending order (oldest due date to newest) within the same deck.
deck-config-sort-order-deck-then-due-date = Deck, then due date
# Sort the cards by the interval, in ascending order (shortest to longest).
deck-config-sort-order-ascending-intervals = Ascending intervals
# Sort the cards by the interval, in descending order (longest to shortest).
deck-config-sort-order-descending-intervals = Descending intervals
# Sort the cards by ease, in ascending order (lowest to highest ease).
deck-config-sort-order-ascending-ease = Ascending ease
# Sort the cards by ease, in descending order (highest to lowest ease).
deck-config-sort-order-descending-ease = Descending ease
# Sort the cards by difficulty, in ascending order (easiest to hardest).
deck-config-sort-order-ascending-difficulty = Easy cards first
# Sort the cards by difficulty, in descending order (hardest to easiest).
deck-config-sort-order-descending-difficulty = Difficult cards first
# Sort the cards by retrievability percentage, in ascending order (0% to 100%, least retrievable to most easily retrievable).
deck-config-sort-order-retrievability-ascending = Ascending retrievability
# Sort the cards by retrievability percentage, in descending order (100% to 0%, most easily retrievable to least retrievable).
deck-config-sort-order-retrievability-descending = Descending retrievability
## Timer section ## Timer section
deck-config-timer-title = Timers deck-config-timer-title = Timer
deck-config-maximum-answer-secs = Maximum answer seconds deck-config-maximum-answer-secs = Maximum answer seconds
deck-config-maximum-answer-secs-tooltip = deck-config-maximum-answer-secs-tooltip =
The maximum number of seconds to record for a single review. If an answer The maximum number of seconds to record for a single review. If an answer
exceeds this time (because you stepped away from the screen for example), exceeds this time (because you stepped away from the screen for example),
the time taken will be recorded as the limit you have set. the time taken will be recorded as the limit you have set.
deck-config-show-answer-timer-tooltip = deck-config-show-answer-timer-tooltip =
On the Study screen, show a timer that counts the time you're In the review screen, show a timer that counts the number of seconds you're
taking to study each card. taking to review each card.
deck-config-stop-timer-on-answer = Stop on-screen timer on answer deck-config-stop-timer-on-answer = Stop timer on answer
deck-config-stop-timer-on-answer-tooltip = deck-config-stop-timer-on-answer-tooltip =
Whether to stop the on-screen timer when the answer is revealed. Whether to stop the timer when the answer is revealed.
This doesn't affect statistics. This doesn't affect statistics.
## Auto Advance section ## Auto Advance section
deck-config-seconds-to-show-question = Seconds to show question for deck-config-seconds-to-show-question = Seconds to show question for
deck-config-seconds-to-show-question-tooltip-3 = When auto advance is activated, the number of seconds to wait before applying the question action. Set to 0 to disable. deck-config-seconds-to-show-question-tooltip-2 = When auto advance is activated, the number of seconds to wait before revealing the answer. Set to 0 to disable.
deck-config-seconds-to-show-answer = Seconds to show answer for deck-config-seconds-to-show-answer = Seconds to show answer for
deck-config-seconds-to-show-answer-tooltip-2 = When auto advance is activated, the number of seconds to wait before applying the answer action. Set to 0 to disable. deck-config-seconds-to-show-answer-tooltip-2 = When auto advance is activated, the number of seconds to wait before applying the answer action. Set to 0 to disable.
deck-config-question-action-show-answer = Show Answer deck-config-question-action-show-answer = Show Answer
@ -271,8 +242,8 @@ deck-config-question-action-show-reminder = Show Reminder
deck-config-question-action = Question action deck-config-question-action = Question action
deck-config-question-action-tool-tip = The action to perform after the question is shown, and time has elapsed. deck-config-question-action-tool-tip = The action to perform after the question is shown, and time has elapsed.
deck-config-answer-action = Answer action deck-config-answer-action = Answer action
deck-config-answer-action-tooltip-2 = The action to perform after the answer is shown, and time has elapsed. deck-config-answer-action-tooltip = The action to perform on the current card before automatically advancing to the next one.
deck-config-wait-for-audio-tooltip-2 = Wait for audio to finish before automatically applying the question action or answer action. deck-config-wait-for-audio-tooltip = Wait for audio to finish before automatically revealing answer or next question.
## Audio section ## Audio section
@ -280,7 +251,7 @@ deck-config-audio-title = Audio
deck-config-disable-autoplay = Don't play audio automatically deck-config-disable-autoplay = Don't play audio automatically
deck-config-disable-autoplay-tooltip = deck-config-disable-autoplay-tooltip =
When enabled, Anki will not play audio automatically. When enabled, Anki will not play audio automatically.
It can be played manually by clicking/tapping on an audio icon, or by using the Replay action. It can be played manually by clicking/tapping on an audio icon, or by using the replay audio action.
deck-config-skip-question-when-replaying = Skip question when replaying answer deck-config-skip-question-when-replaying = Skip question when replaying answer
deck-config-always-include-question-audio-tooltip = deck-config-always-include-question-audio-tooltip =
Whether the question audio should be included when the Replay action is Whether the question audio should be included when the Replay action is
@ -308,22 +279,6 @@ deck-config-minimum-interval-tooltip = The minimum interval given to a review ca
deck-config-custom-scheduling = Custom scheduling deck-config-custom-scheduling = Custom scheduling
deck-config-custom-scheduling-tooltip = Affects the entire collection. Use at your own risk! deck-config-custom-scheduling-tooltip = Affects the entire collection. Use at your own risk!
## Easy Days section.
deck-config-easy-days-title = Easy Days
deck-config-easy-days-monday = Mon
deck-config-easy-days-tuesday = Tue
deck-config-easy-days-wednesday = Wed
deck-config-easy-days-thursday = Thu
deck-config-easy-days-friday = Fri
deck-config-easy-days-saturday = Sat
deck-config-easy-days-sunday = Sun
deck-config-easy-days-normal = Normal
deck-config-easy-days-reduced = Reduced
deck-config-easy-days-minimum = Minimum
deck-config-easy-days-no-normal-days = At least one day should be set to '{ deck-config-easy-days-normal }'.
deck-config-easy-days-change = Existing reviews will not be rescheduled unless '{ deck-config-reschedule-cards-on-change }' is enabled in the FSRS options.
## Adding/renaming ## Adding/renaming
deck-config-add-group = Add Preset deck-config-add-group = Add Preset
@ -345,7 +300,7 @@ deck-config-confirm-remove-name = Remove { $name }?
deck-config-save-button = Save deck-config-save-button = Save
deck-config-save-to-all-subdecks = Save to All Subdecks deck-config-save-to-all-subdecks = Save to All Subdecks
deck-config-save-and-optimize = Optimize All Presets deck-config-save-and-optimize = Optimize All Presets
deck-config-revert-button-tooltip = Restore this setting to its default value? deck-config-revert-button-tooltip = Restore this setting to its default value.
## These strings are shown via the Description button at the bottom of the ## These strings are shown via the Description button at the bottom of the
## overview screen. ## overview screen.
@ -372,8 +327,6 @@ deck-config-learning-step-above-graduating-interval = The graduating interval sh
deck-config-good-above-easy = The easy interval should be at least as long as the graduating interval. deck-config-good-above-easy = The easy interval should be at least as long as the graduating interval.
deck-config-relearning-steps-above-minimum-interval = The minimum lapse interval should be at least as long as your final relearning step. deck-config-relearning-steps-above-minimum-interval = The minimum lapse interval should be at least as long as your final relearning step.
deck-config-maximum-answer-secs-above-recommended = Anki can schedule your reviews more efficiently when you keep each question short. deck-config-maximum-answer-secs-above-recommended = Anki can schedule your reviews more efficiently when you keep each question short.
deck-config-too-short-maximum-interval = A maximum interval less than 6 months is not recommended.
deck-config-ignore-before-info = (Approximately) { $included }/{ $totalCards } cards will be used to optimize the FSRS parameters.
## Selecting a deck ## Selecting a deck
@ -382,8 +335,10 @@ deck-config-which-deck = Which deck would you like to display options for?
## Messages related to the FSRS scheduler ## Messages related to the FSRS scheduler
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }... deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters. deck-config-invalid-weights = Parameters must be either left blank to use the defaults, or must be 17 comma-separated numbers.
deck-config-not-enough-history = Insufficient review history to perform this operation. deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews = deck-config-must-have-400-reviews =
{ $count -> { $count ->
[one] Only { $count } review was found. [one] Only { $count } review was found.
@ -392,37 +347,40 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards # Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-optimize-button = Optimize Current Preset deck-config-compute-minimum-recommended-retention = Minimum recommended retention
# Indicates that a given function or label, provided via the "text" variable, operates slowly. deck-config-optimize-button = Optimize
deck-config-slow-suffix = { $text } (slow)
deck-config-compute-button = Compute deck-config-compute-button = Compute
deck-config-ignore-before = Ignore cards reviewed before deck-config-ignore-before = Ignore reviews before
deck-config-time-to-optimize = It's been a while - using the Optimize All Presets button is recommended. deck-config-optimize-all-tip = You can optimize all presets at once by using the dropdown button next to "Save".
deck-config-evaluate-button = Evaluate deck-config-evaluate-button = Evaluate
deck-config-desired-retention = Desired retention deck-config-desired-retention = Desired retention
deck-config-historical-retention = Historical retention deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history. deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended. deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params deck-config-get-params = Get Params
deck-config-fsrs-on-all-clients =
Please ensure all of your Anki clients are Anki(Mobile) 23.10+ or AnkiDroid 2.17+. FSRS will
not work correctly if one of your clients is older.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete. deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }... deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change deck-config-reschedule-cards-on-change = Reschedule cards on change
deck-config-fsrs-tooltip = deck-config-fsrs-tooltip =
Affects the entire collection. Affects the entire collection.
The Free Spaced Repetition Scheduler (FSRS) is an alternative to Anki's legacy SuperMemo 2 (SM-2) algorithm. The Free Spaced Repetition Scheduler (FSRS) is an alternative to Anki's legacy SuperMemo 2 (SM2) scheduler.
By more accurately determining how likely you are to forget a card, it can help you remember By more accurately determining when you are likely to forget, it can help you remember
more material in the same amount of time. This setting is shared by all presets. more material in the same amount of time. This setting is shared by all deck presets.
If you previously used the 'custom scheduling' version of FSRS, please make
sure you clear out the custom scheduling section before enabling this option.
deck-config-desired-retention-tooltip = deck-config-desired-retention-tooltip =
By default, Anki schedules cards so that you have a 90% chance of remembering them when The default value of 0.9 will schedule cards so you have a 90% chance of remembering them when
they come up for review again. If you increase this value, Anki will show cards more frequently they come up for review again. If you increase this value, Anki will show cards more frequently
to increase the chances of you remembering them. If you decrease the value, Anki will show cards to increase the chances of you remembering them. If you decrease the value, Anki will show cards
less frequently, and you will forget more of them. Be conservative when adjusting this - higher less frequently, and you will forget more of them. Be conservative when adjusting this - higher
values will greatly increase your workload, and lower values can be demoralizing when you forget values will greatly increase your workload, and lower values can be demoralizing when you forget
a lot of material. a lot of material.
deck-config-desired-retention-tooltip2 =
The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip = deck-config-historical-retention-tooltip =
When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
assume that when you did those old reviews, you remembered 90% of the material. If your old retention assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@ -430,12 +388,12 @@ deck-config-historical-retention-tooltip =
the missing reviews. the missing reviews.
Your review history may be incomplete for two reasons: Your review history may be incomplete for two reasons:
1. Because you're using the 'ignore cards reviewed before' option. 1. Because you've used the 'ignore reviews before' option.
2. Because you previously deleted review logs to free up space, or imported material from a different 2. Because you previously deleted review logs to free up space, or imported material from a different
SRS program. SRS program.
The latter is quite rare, so unless you're using the former option, you probably don't need to adjust The latter is quite rare, so unless you've used the former option, you probably don't need to adjust
this option. this setting.
deck-config-weights-tooltip2 = deck-config-weights-tooltip2 =
FSRS parameters affect how cards are scheduled. Anki will start with default parameters. You can use FSRS parameters affect how cards are scheduled. Anki will start with default parameters. You can use
the option below to optimize the parameters to best match your performance in decks using this preset. the option below to optimize the parameters to best match your performance in decks using this preset.
@ -448,44 +406,44 @@ deck-config-reschedule-cards-on-change-tooltip =
will be changed. will be changed.
deck-config-reschedule-cards-warning = deck-config-reschedule-cards-warning =
Depending on your desired retention, this can result in a large number of cards becoming Depending on your desired retention, this can result in a large number of cards becoming
due, so is not recommended when first switching from SM-2. due, so is not recommended when first switching from SM2.
Use this option sparingly, as it will add a review entry to each of your cards, and Use this option sparingly, as it will add a review entry to each of your cards, and
increase the size of your collection. increase the size of your collection.
deck-config-ignore-before-tooltip-2 = deck-config-ignore-before-tooltip =
If set, cards reviewed before the provided date will be ignored when optimizing FSRS parameters. If set, reviews before the provided date will be ignored when optimizing & evaluating FSRS parameters.
This can be useful if you imported someone else's scheduling data, or have changed the way you use the answer buttons. This can be useful if you imported someone else's scheduling data, or have changed the way you use the answer buttons.
deck-config-compute-optimal-weights-tooltip2 = deck-config-compute-optimal-weights-tooltip2 =
When you click the Optimize button, FSRS will analyze your review history, and generate parameters that are When you click the Optimize button, FSRS will analyze your review history, and generate parameters that are
optimal for your memory and the content you're studying. If your decks vary wildly in subjective difficulty, it optimal for your memory and the content you're studying. If your decks vary wildly in difficulty, it
is recommended to assign them separate presets, as the parameters for easy decks and hard decks will be different. is recommended to assign them separate presets, as the parameters for easy decks and hard decks will be different.
You don't need to optimize your parameters frequently - once every few months is sufficient. You don't need to optimize your parameters frequently - once every few months is sufficient.
By default, parameters will be calculated from the review history of all decks using the current preset. You can By default, parameters will be calculated from the review history of all decks using the current preset. You can
optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
optimizing the parameters. optimizing the parameters.
deck-config-compute-optimal-retention-tooltip3 =
This tool assumes that youre starting with 0 learned cards, and will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. To accurately simulate your learning process,
this feature requires a minimum of 400+ reviews. The calculated number can serve as a reference when deciding what to
set your desired retention to. You may wish to choose a higher desired retention, if youre willing to trade more study
time for a greater recall rate. Setting your desired retention lower than the minimum is not recommended, as it will
lead to a higher workload, because of the high forgetting rate.
deck-config-please-save-your-changes-first = Please save your changes first. deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-workload-factor-change = Approximate workload: {$factor}x deck-config-a-100-day-interval =
(compared to {$previousDR}% desired retention) { $days ->
deck-config-workload-factor-unchanged = The higher this value, the more frequently cards will be shown to you. [one] A 100 day interval will become { $days } day.
deck-config-desired-retention-too-low = Your desired retention is very low, which can lead to very long intervals. *[other] A 100 day interval will become { $days } days.
deck-config-desired-retention-too-high = Your desired retention is very high, which can lead to very short intervals. }
deck-config-percent-of-reviews = deck-config-percent-of-reviews =
{ $reviews -> { $reviews ->
[one] { $pct }% of { $reviews } review [one] { $pct }% of { $reviews } review
*[other] { $pct }% of { $reviews } reviews *[other] { $pct }% of { $reviews } reviews
} }
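The two messages above use Fluent's selector syntax: the `$reviews` variable is matched against CLDR plural categories, with `*` marking the fallback variant. A minimal sketch of the pattern, using a hypothetical `example-cards` id that is not part of Anki's catalog:

    # Hypothetical message, shown only to illustrate the selector syntax.
    example-cards =
        { $cards ->
            [one] { $cards } card
            *[other] { $cards } cards
        }

Formatting example-cards with $cards = 1 yields "1 card"; any other count falls through to the *[other] variant.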
deck-config-percent-input = { $pct }%
# This message appears during FSRS parameter optimization.
deck-config-checking-for-improvement = Checking for improvement...
deck-config-optimizing-preset = Optimizing preset { $current_count }/{ $total_count }... deck-config-optimizing-preset = Optimizing preset { $current_count }/{ $total_count }...
deck-config-fsrs-must-be-enabled = FSRS must be enabled first. deck-config-fsrs-must-be-enabled = FSRS must be enabled first.
deck-config-fsrs-params-optimal = The FSRS parameters currently appear to be optimal. deck-config-fsrs-params-optimal = The FSRS parameters currently appear to be optimal.
deck-config-fsrs-params-no-reviews = No reviews found. Make sure this preset is assigned to all decks (including subdecks) that you want to optimize, and try again.
deck-config-wait-for-audio = Wait for audio deck-config-wait-for-audio = Wait for audio
deck-config-show-reminder = Show Reminder deck-config-show-reminder = Show Reminder
deck-config-answer-again = Answer Again deck-config-answer-again = Answer Again
@ -493,69 +451,9 @@ deck-config-answer-hard = Answer Hard
deck-config-answer-good = Answer Good deck-config-answer-good = Answer Good
deck-config-days-to-simulate = Days to simulate deck-config-days-to-simulate = Days to simulate
deck-config-desired-retention-below-optimal = Your desired retention is below optimal. Increasing it is recommended. deck-config-desired-retention-below-optimal = Your desired retention is below optimal. Increasing it is recommended.
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation
deck-config-fsrs-simulator-radio-count = Reviews
deck-config-advanced-settings = Advanced Settings
deck-config-smooth-graph = Smooth graph
deck-config-suspend-leeches = Suspend leeches
deck-config-save-options-to-preset = Save Changes to Preset
deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
# Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card
## Messages related to the FSRS scheduler's health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.
# Checkbox
deck-config-health-check = Check health when optimizing
# Message box showing the result of the health check
deck-config-fsrs-bad-fit-warning = Health Check:
Your memory is difficult for FSRS to predict. Recommendations:
- Suspend or reformulate any cards you constantly forget.
- Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
- Understand before you memorize.
If you follow these suggestions, performance will usually improve over the next few months.
# Message box showing the result of the health check
deck-config-fsrs-good-fit = Health Check:
FSRS can adapt to your memory well.
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future. ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
This tool will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you're
willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
{ $days ->
[one] A 100 day interval will become { $days } day.
*[other] A 100 day interval will become { $days } days.
}
deck-config-fsrs-simulator-y-axis-title-time = Review Time/Day
deck-config-fsrs-simulator-y-axis-title-count = Review Count/Day
deck-config-fsrs-simulator-y-axis-title-memorized = Memorized Total
deck-config-bury-siblings = Bury siblings deck-config-bury-siblings = Bury siblings
deck-config-do-not-bury = Do not bury siblings deck-config-do-not-bury = Do not bury siblings
deck-config-bury-if-new = Bury if new deck-config-bury-if-new = Bury if new
@ -572,20 +470,12 @@ deck-config-bury-tooltip =
When using the V3 scheduler, interday learning cards can also be buried. Interday When using the V3 scheduler, interday learning cards can also be buried. Interday
learning cards are cards with a current learning step of one or more days. learning cards are cards with a current learning step of one or more days.
deck-config-seconds-to-show-question-tooltip = When auto advance is activated, the number of seconds to wait before revealing the answer. Set to 0 to disable.
deck-config-answer-action-tooltip = The action to perform on the current card before automatically advancing to the next one.
deck-config-wait-for-audio-tooltip = Wait for audio to finish before automatically revealing answer or next question.
deck-config-ignore-before-tooltip =
If set, reviews before the provided date will be ignored when optimizing & evaluating FSRS parameters.
This can be useful if you imported someone else's scheduling data, or have changed the way you use the answer buttons.
deck-config-compute-optimal-retention-tooltip = deck-config-compute-optimal-retention-tooltip =
This tool assumes you're starting with 0 cards, and will attempt to calculate the amount of material you'll This tool assumes you're starting with 0 cards, and will attempt to calculate the amount of material you'll
be able to retain in the given time frame. The estimated retention will greatly depend on your inputs, and be able to retain in the given time frame. The estimated retention will greatly depend on your inputs, and
if it significantly differs from 0.9, it's a sign that the time you've allocated each day is either too low if it significantly differs from 0.9, it's a sign that the time you've allocated each day is either too low
or too high for the amount of cards you're trying to learn. This number can be useful as a reference, but it or too high for the amount of cards you're trying to learn. This number can be useful as a reference, but it
is not recommended to copy it into the desired retention field. is not recommended to copy it into the desired retention field.
deck-config-health-check-tooltip1 = This will show a warning if FSRS struggles to adapt to your memory.
deck-config-health-check-tooltip2 = Health check is performed only when using Optimize Current Preset.
deck-config-compute-optimal-retention = Compute minimum recommended retention deck-config-compute-optimal-retention = Compute minimum recommended retention
deck-config-predicted-optimal-retention = Minimum recommended retention: { $num } deck-config-predicted-optimal-retention = Minimum recommended retention: { $num }
@ -609,16 +499,3 @@ deck-config-compute-optimal-retention-tooltip2 =
reference when deciding what to set your desired retention to. You may wish to choose a higher desired retention, reference when deciding what to set your desired retention to. You may wish to choose a higher desired retention,
if you're willing to trade more study time for a greater recall rate. Setting your desired retention lower than if you're willing to trade more study time for a greater recall rate. Setting your desired retention lower than
the minimum is not recommended, as it will lead to more work without benefit. the minimum is not recommended, as it will lead to more work without benefit.
deck-config-compute-optimal-retention-tooltip3 =
This tool assumes that you're starting with 0 learned cards, and will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. To accurately simulate your learning process,
this feature requires a minimum of 400+ reviews. The calculated number can serve as a reference when deciding what to
set your desired retention to. You may wish to choose a higher desired retention, if you're willing to trade more study
time for a greater recall rate. Setting your desired retention lower than the minimum is not recommended, as it will
lead to a higher workload, because of the high forgetting rate.
deck-config-seconds-to-show-question-tooltip-2 = When auto advance is activated, the number of seconds to wait before revealing the answer. Set to 0 to disable.
deck-config-invalid-weights = Parameters must be either left blank to use the defaults, or must be 17 comma-separated numbers.
deck-config-fsrs-on-all-clients =
Please ensure all of your Anki clients are Anki(Mobile) 23.10+ or AnkiDroid 2.17+. FSRS will
not work correctly if one of your clients is older.
deck-config-optimize-all-tip = You can optimize all presets at once by using the dropdown button next to "Save".
View file
@ -5,17 +5,26 @@ decks-create-deck = Create Deck
decks_create_even_if_empty = Create/update this deck even if empty decks_create_even_if_empty = Create/update this deck even if empty
decks-custom-steps-in-minutes = Custom steps (in minutes) decks-custom-steps-in-minutes = Custom steps (in minutes)
decks-deck = Deck decks-deck = Deck
decks-decreasing-intervals = Decreasing intervals
decks-delete-deck = Delete Deck decks-delete-deck = Delete Deck
decks-enable-second-filter = Enable second filter decks-enable-second-filter = Enable second filter
decks-filter = Filter: decks-filter = Filter:
decks-filter-2 = Filter 2 decks-filter-2 = Filter 2
decks-get-shared = Get Shared decks-get-shared = Get Shared
decks-import-file = Import File decks-import-file = Import File
decks-increasing-intervals = Increasing intervals
decks-latest-added-first = Latest added first
decks-limit-to = Limit to decks-limit-to = Limit to
decks-minutes = minutes decks-minutes = minutes
decks-most-lapses = Most lapses
decks-new-deck-name = New deck name: decks-new-deck-name = New deck name:
decks-no-deck = [no deck] decks-no-deck = [no deck]
decks-oldest-seen-first = Oldest seen first
decks-order-added = Order added
decks-order-due = Order due
decks-please-select-something = Please select something. decks-please-select-something = Please select something.
decks-random = Random
decks-relative-overdueness = Relative overdueness
decks-repeat-failed-cards-after = Delay Repeat failed cards after decks-repeat-failed-cards-after = Delay Repeat failed cards after
# e.g. "Delay for Again", "Delay for Hard", "Delay for Good" # e.g. "Delay for Again", "Delay for Hard", "Delay for Good"
decks-delay-for-button = Delay for { $button } decks-delay-for-button = Delay for { $button }
@ -28,28 +37,3 @@ decks-learn-header = Learn
# The count of cards waiting to be reviewed # The count of cards waiting to be reviewed
decks-review-header = Due decks-review-header = Due
decks-zero-minutes-hint = (0 = return card to original deck) decks-zero-minutes-hint = (0 = return card to original deck)
## Sort order of cards
# Combobox entry: Sort the cards by the date they were added, in ascending order (oldest to newest)
decks-order-added = Order added
# Combobox entry: Sort the cards by the date they were added, in descending order (newest to oldest)
decks-latest-added-first = Latest added first
# Combobox entry: Sort the cards by due date, in ascending order (oldest due date to newest)
decks-order-due = Order due
# Combobox entry: Sort the cards by the number of lapses, in descending order (most lapses to least lapses)
decks-most-lapses = Most lapses
# Combobox entry: Sort the cards by the interval, in ascending order (shortest to longest)
decks-increasing-intervals = Increasing intervals
# Combobox entry: Sort the cards by the interval, in descending order (longest to shortest)
decks-decreasing-intervals = Decreasing intervals
# Combobox entry: Sort the cards by the last review date, in ascending order (oldest seen to newest seen)
decks-oldest-seen-first = Oldest seen first
# Combobox entry: Sort the cards in random order
decks-random = Random
## These strings are no longer used - you do not need to translate them if they
## are not already translated.
# Combobox entry: Sort the cards by relative overdueness, in descending order (most overdue to least overdue)
decks-relative-overdueness = Relative overdueness
View file
@ -10,7 +10,6 @@ editing-center = Center
editing-change-color = Change color editing-change-color = Change color
editing-cloze-deletion = Cloze deletion (new card) editing-cloze-deletion = Cloze deletion (new card)
editing-cloze-deletion-repeat = Cloze deletion (same card) editing-cloze-deletion-repeat = Cloze deletion (same card)
editing-copy-image = Copy image
editing-couldnt-record-audio-have-you-installed = Couldn't record audio. Have you installed 'lame'? editing-couldnt-record-audio-have-you-installed = Couldn't record audio. Have you installed 'lame'?
editing-customize-card-templates = Customize Card Templates editing-customize-card-templates = Customize Card Templates
editing-customize-fields = Customize Fields editing-customize-fields = Customize Fields
@ -36,8 +35,6 @@ editing-mathjax-chemistry = MathJax chemistry
editing-mathjax-inline = MathJax inline editing-mathjax-inline = MathJax inline
editing-mathjax-placeholder = Press { $accept } to accept, { $newline } for new line. editing-mathjax-placeholder = Press { $accept } to accept, { $newline } for new line.
editing-media = Media editing-media = Media
editing-open-image = Open image
editing-show-in-folder = Show in folder
editing-ordered-list = Ordered list editing-ordered-list = Ordered list
editing-outdent = Decrease indent editing-outdent = Decrease indent
editing-paste = Paste editing-paste = Paste
@ -96,7 +93,6 @@ editing-image-occlusion-rectangle-tool = Rectangle
editing-image-occlusion-ellipse-tool = Ellipse editing-image-occlusion-ellipse-tool = Ellipse
editing-image-occlusion-polygon-tool = Polygon editing-image-occlusion-polygon-tool = Polygon
editing-image-occlusion-text-tool = Text editing-image-occlusion-text-tool = Text
editing-image-occlusion-fill-tool = Fill with colour
editing-image-occlusion-toggle-mask-editor = Toggle Mask Editor editing-image-occlusion-toggle-mask-editor = Toggle Mask Editor
editing-image-occlusion-reset = Reset Image Occlusion editing-image-occlusion-reset = Reset Image Occlusion
editing-image-occlusion-confirm-reset = Are you sure you want to reset this image occlusion? editing-image-occlusion-confirm-reset = Are you sure you want to reset this image occlusion?
View file
@ -5,7 +5,7 @@ errors-100-tags-max =
A maximum of 100 tags can be selected. Listing the A maximum of 100 tags can be selected. Listing the
tags you want instead of the ones you don't want is usually simpler, and there tags you want instead of the ones you don't want is usually simpler, and there
is no need to select child tags if you have selected a parent tag. is no need to select child tags if you have selected a parent tag.
errors-multiple-notetypes-selected = Please select notes from only one note type. errors-multiple-notetypes-selected = Please select notes from only one notetype.
errors-please-check-database = Please use the Check Database action, then try again. errors-please-check-database = Please use the Check Database action, then try again.
errors-please-check-media = Please use the Check Media action, then try again. errors-please-check-media = Please use the Check Media action, then try again.
errors-collection-too-new = This collection requires a newer version of Anki to open. errors-collection-too-new = This collection requires a newer version of Anki to open.
View file
@ -40,5 +40,5 @@ exporting-processed-media-files =
*[other] Processed { $count } media files... *[other] Processed { $count } media files...
} }
exporting-include-deck = Include deck name exporting-include-deck = Include deck name
exporting-include-notetype = Include note type name exporting-include-notetype = Include notetype name
exporting-include-guid = Include unique identifier exporting-include-guid = Include unique identifier
View file
@ -8,14 +8,13 @@ importing-anki2-files-are-not-directly-importable = .anki2 files are not directl
importing-appeared-twice-in-file = Appeared twice in file: { $val } importing-appeared-twice-in-file = Appeared twice in file: { $val }
importing-by-default-anki-will-detect-the = By default, Anki will detect the character between fields, such as a tab, comma, and so on. If Anki is detecting the character incorrectly, you can enter it here. Use \t to represent tab. importing-by-default-anki-will-detect-the = By default, Anki will detect the character between fields, such as a tab, comma, and so on. If Anki is detecting the character incorrectly, you can enter it here. Use \t to represent tab.
importing-cannot-merge-notetypes-of-different-kinds = importing-cannot-merge-notetypes-of-different-kinds =
Cloze note types cannot be merged with regular note types. Cloze notetypes cannot be merged with regular notetypes.
You may still import the file with '{ importing-merge-notetypes }' disabled. You may still import the file with '{ importing-merge-notetypes }' disabled.
importing-change = Change importing-change = Change
importing-colon = Colon importing-colon = Colon
importing-comma = Comma importing-comma = Comma
importing-empty-first-field = Empty first field: { $val } importing-empty-first-field = Empty first field: { $val }
importing-field-separator = Field separator importing-field-separator = Field separator
importing-field-separator-guessed = Field separator (guessed)
importing-field-mapping = Field mapping importing-field-mapping = Field mapping
importing-field-of-file-is = Field <b>{ $val }</b> of file is: importing-field-of-file-is = Field <b>{ $val }</b> of file is:
importing-fields-separated-by = Fields separated by: { $val } importing-fields-separated-by = Fields separated by: { $val }
@ -34,13 +33,13 @@ importing-map-to = Map to { $val }
importing-map-to-tags = Map to Tags importing-map-to-tags = Map to Tags
importing-mapped-to = mapped to <b>{ $val }</b> importing-mapped-to = mapped to <b>{ $val }</b>
importing-mapped-to-tags = mapped to <b>Tags</b> importing-mapped-to-tags = mapped to <b>Tags</b>
# the action of combining two existing note types to create a new one # the action of combining two existing notetypes to create a new one
importing-merge-notetypes = Merge note types importing-merge-notetypes = Merge notetypes
importing-merge-notetypes-help = importing-merge-notetypes-help =
If checked, and you or the deck author altered the schema of a note type, Anki will If checked, and you or the deck author altered the schema of a notetype, Anki will
merge the two versions instead of keeping both. merge the two versions instead of keeping both.
Altering a note type's schema means adding, removing, or reordering fields or templates, Altering a notetype's schema means adding, removing, or reordering fields or templates,
or changing the sort field. or changing the sort field.
As a counterexample, changing the front side of an existing template does *not* constitute As a counterexample, changing the front side of an existing template does *not* constitute
a schema change. a schema change.
@ -48,11 +47,10 @@ importing-merge-notetypes-help =
Warning: This will require a one-way sync, and may mark existing notes as modified. Warning: This will require a one-way sync, and may mark existing notes as modified.
importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db) importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only. importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
importing-new-deck-will-be-created = A new deck will be created: { $name }
importing-notes-added-from-file = Notes added from file: { $val } importing-notes-added-from-file = Notes added from file: { $val }
importing-notes-found-in-file = Notes found in file: { $val } importing-notes-found-in-file = Notes found in file: { $val }
importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val } importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }
importing-notes-skipped-update-due-to-notetype = Notes not updated, as note type has been modified since you first imported the notes: { $val } importing-notes-skipped-update-due-to-notetype = Notes not updated, as notetype has been modified since you first imported the notes: { $val }
importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val } importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val }
importing-include-reviews = Include reviews importing-include-reviews = Include reviews
importing-also-import-progress = Import any learning progress importing-also-import-progress = Import any learning progress
@ -66,18 +64,14 @@ importing-with-deck-configs-help =
If enabled, any deck options that the deck sharer included will also be imported. If enabled, any deck options that the deck sharer included will also be imported.
Otherwise, all decks will be assigned the default preset. Otherwise, all decks will be assigned the default preset.
importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip) importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
# the '|' character # the '|' character
importing-pipe = Pipe importing-pipe = Pipe
# Warning displayed when the csv import preview table is clipped (some columns were hidden)
# $count is intended to be a large number (1000 and above)
importing-preview-truncated =
{ $count ->
*[other] Only the first { $count } columns are shown. If this doesn't seem right, try changing the field separator.
}
importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } fields, expected { $expected } importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } fields, expected { $expected }
importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual. importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual.
importing-semicolon = Semicolon importing-semicolon = Semicolon
importing-skipped = Skipped importing-skipped = Skipped
importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
importing-tab = Tab importing-tab = Tab
importing-tag-modified-notes = Tag modified notes: importing-tag-modified-notes = Tag modified notes:
importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*) importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*)
@ -96,10 +90,10 @@ importing-update-notes = Update notes
importing-update-notes-help = importing-update-notes-help =
When to update an existing note in your collection. By default, this is only done When to update an existing note in your collection. By default, this is only done
if the matching imported note was more recently modified. if the matching imported note was more recently modified.
importing-update-notetypes = Update note types importing-update-notetypes = Update notetypes
importing-update-notetypes-help = importing-update-notetypes-help =
When to update an existing note type in your collection. By default, this is only done When to update an existing notetype in your collection. By default, this is only done
if the matching imported note type was more recently modified. Changes to template text if the matching imported notetype was more recently modified. Changes to template text
and styling can always be imported, but for schema changes (e.g. the number or order of and styling can always be imported, but for schema changes (e.g. the number or order of
fields has changed), the '{ importing-merge-notetypes }' option will also need to be enabled. fields has changed), the '{ importing-merge-notetypes }' option will also need to be enabled.
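The help string above embeds another message by reference. In Fluent, `{ importing-merge-notetypes }` resolves at runtime to that message's translated value, so the help text always matches the current label of the option it names. A minimal sketch with hypothetical ids that are not part of Anki's catalog:

    # Hypothetical messages, shown only to illustrate message references.
    example-option = Merge notetypes
    example-option-help =
        Enable '{ example-option }' if the schema of the notetype has changed.

Renaming or retranslating example-option automatically updates every string that references it.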
importing-note-added = importing-note-added =
@ -154,7 +148,7 @@ importing-file = File
# "Match scope: notetype / notetype and deck". Controls how duplicates are matched. # "Match scope: notetype / notetype and deck". Controls how duplicates are matched.
importing-match-scope = Match scope importing-match-scope = Match scope
# Used with the 'match scope' option # Used with the 'match scope' option
importing-notetype-and-deck = Note type and deck importing-notetype-and-deck = Notetype and deck
importing-cards-added = importing-cards-added =
{ $count -> { $count ->
[one] { $count } card added. [one] { $count } card added.
@ -188,8 +182,8 @@ importing-conflicting-notes-skipped =
} }
importing-conflicting-notes-skipped2 = importing-conflicting-notes-skipped2 =
{ $count -> { $count ->
[one] { $count } note was not imported, because its note type has changed, and '{ importing-merge-notetypes }' was not enabled. [one] { $count } note was not imported, because its notetype has changed, and '{ importing-merge-notetypes }' was not enabled.
*[other] { $count } notes were not imported, because their note type has changed, and '{ importing-merge-notetypes }' was not enabled. *[other] { $count } notes were not imported, because their notetype has changed, and '{ importing-merge-notetypes }' was not enabled.
} }
importing-import-log = Import Log importing-import-log = Import Log
importing-no-notes-in-file = No notes found in file. importing-no-notes-in-file = No notes found in file.
@ -204,8 +198,8 @@ importing-status = Status
importing-duplicate-note-added = Duplicate note added importing-duplicate-note-added = Duplicate note added
importing-added-new-note = New note added importing-added-new-note = New note added
importing-existing-note-skipped = Note skipped, as an up-to-date copy is already in your collection importing-existing-note-skipped = Note skipped, as an up-to-date copy is already in your collection
importing-note-skipped-update-due-to-notetype = Note not updated, as note type has been modified since you first imported the note importing-note-skipped-update-due-to-notetype = Note not updated, as notetype has been modified since you first imported the note
importing-note-skipped-update-due-to-notetype2 = Note not updated, as note type has been modified since you first imported the note, and '{ importing-merge-notetypes }' was not enabled importing-note-skipped-update-due-to-notetype2 = Note not updated, as notetype has been modified since you first imported the note, and '{ importing-merge-notetypes }' was not enabled
importing-note-updated-as-file-had-newer = Note updated, as file had newer version importing-note-updated-as-file-had-newer = Note updated, as file had newer version
importing-note-skipped-due-to-missing-notetype = Note skipped, as its notetype was missing importing-note-skipped-due-to-missing-notetype = Note skipped, as its notetype was missing
importing-note-skipped-due-to-missing-deck = Note skipped, as its deck was missing importing-note-skipped-due-to-missing-deck = Note skipped, as its deck was missing
@ -217,18 +211,15 @@ importing-field-separator-help =
Please note that if this character appears in any field itself, the field has to be Please note that if this character appears in any field itself, the field has to be
quoted according to the CSV standard. Spreadsheet programs like LibreOffice will quoted according to the CSV standard. Spreadsheet programs like LibreOffice will
do this automatically. do this automatically.
It cannot be changed if the text file forces use of a specific separator via a file header.
If a file header is not present, Anki will try to guess what the separator is.
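For illustration, a file header of the kind mentioned above might look like the following sketch (the header names follow Anki's documented CSV import headers; the data rows are invented):

    #separator:Semicolon
    #html:false
    "a front; containing the separator";its back
    plain front;plain back

The `#separator:` header pins the field separator, which is why the setting cannot then be changed in the import dialog, and the quoted first field shows how a field containing the separator itself must be quoted per the CSV standard.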
importing-allow-html-in-fields-help = importing-allow-html-in-fields-help =
Enable this if the file contains HTML formatting. E.g. if the file contains the string Enable this if the file contains HTML formatting. E.g. if the file contains the string
'&lt;br&gt;', it will appear as a line break on your card. On the other hand, with this '&lt;br&gt;', it will appear as a line break on your card. On the other hand, with this
option disabled, the literal characters '&lt;br&gt;' will be rendered. option disabled, the literal characters '&lt;br&gt;' will be rendered.
importing-notetype-help = importing-notetype-help =
Newly-imported notes will have this note type, and only existing notes with this Newly-imported notes will have this notetype, and only existing notes with this
note type will be updated. notetype will be updated.
You can choose which fields in the file correspond to which note type fields with the You can choose which fields in the file correspond to which notetype fields with the
mapping tool. mapping tool.
importing-deck-help = Imported cards will be placed in this deck. importing-deck-help = Imported cards will be placed in this deck.
importing-existing-notes-help = importing-existing-notes-help =
@ -238,7 +229,7 @@ importing-existing-notes-help =
- `{ importing-preserve }`: Do nothing. - `{ importing-preserve }`: Do nothing.
- `{ importing-duplicate }`: Create a new note. - `{ importing-duplicate }`: Create a new note.
importing-match-scope-help = importing-match-scope-help =
Only existing notes with the same note type will be checked for duplicates. This can Only existing notes with the same notetype will be checked for duplicates. This can
additionally be restricted to notes with cards in the same deck. additionally be restricted to notes with cards in the same deck.
importing-tag-all-notes-help = importing-tag-all-notes-help =
These tags will be added to both newly-imported and updated notes. These tags will be added to both newly-imported and updated notes.
@ -251,5 +242,3 @@ importing-importing-collection = Importing collection...
importing-unable-to-import-filename = Unable to import { $filename }: file type not supported importing-unable-to-import-filename = Unable to import { $filename }: file type not supported
importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val } importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val }
importing-added = Added importing-added = Added
importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
View file
@ -1,4 +1,4 @@
notetypes-notetype = Note Type notetypes-notetype = Notetype
## Default field names in newly created note types ## Default field names in newly created note types
View file
@ -27,6 +27,7 @@ preferences-show-remaining-card-count = Show remaining card count
preferences-some-settings-will-take-effect-after = Some settings will take effect after you restart Anki. preferences-some-settings-will-take-effect-after = Some settings will take effect after you restart Anki.
preferences-tab-synchronisation = Synchronization preferences-tab-synchronisation = Synchronization
preferences-synchronize-audio-and-images-too = Synchronize audio and images too preferences-synchronize-audio-and-images-too = Synchronize audio and images too
preferences-not-logged-in = Not currently logged in to AnkiWeb.
preferences-login-successful-sync-now = Log-in successful. Save preferences and sync now? preferences-login-successful-sync-now = Log-in successful. Save preferences and sync now?
preferences-timebox-time-limit = Timebox time limit preferences-timebox-time-limit = Timebox time limit
preferences-user-interface-size = User interface size preferences-user-interface-size = User interface size
@ -34,13 +35,12 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile. preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14) preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text preferences-default-search-text = Default search text
preferences-default-search-text-example = e.g. "deck:current" preferences-default-search-text-example = eg. 'deck:current '
preferences-theme = Theme preferences-theme = Theme
preferences-theme-follow-system = Follow System preferences-theme-follow-system = Follow System
preferences-theme-light = Light preferences-theme-light = Light
preferences-theme-dark = Dark preferences-theme-dark = Dark
preferences-v3-scheduler = V3 scheduler preferences-v3-scheduler = V3 scheduler
preferences-check-for-updates = Check for program updates
preferences-ignore-accents-in-search = Ignore accents in search (slower) preferences-ignore-accents-in-search = Ignore accents in search (slower)
preferences-backup-explanation = preferences-backup-explanation =
Anki periodically backs up your collection. After backups are more than 2 days old, Anki periodically backs up your collection. After backups are more than 2 days old,
@ -77,24 +77,9 @@ preferences-network-timeout = Network timeout
preferences-reset-window-sizes = Reset Window Sizes preferences-reset-window-sizes = Reset Window Sizes
preferences-reset-window-sizes-complete = Window sizes and locations have been reset. preferences-reset-window-sizes-complete = Window sizes and locations have been reset.
preferences-shortcut-placeholder = Enter an unused shortcut key, or leave empty to disable. preferences-shortcut-placeholder = Enter an unused shortcut key, or leave empty to disable.
preferences-third-party-services = Third-Party Services
preferences-ankihub-not-logged-in = Not currently logged in to AnkiHub.
preferences-ankiweb-intro = AnkiWeb is a free service that lets you keep your flashcard data in sync across your devices, and provides a way to recover the data if your device breaks or is lost.
preferences-ankihub-intro = AnkiHub provides collaborative deck editing and additional study tools. A paid subscription is required to access certain features.
preferences-third-party-description = Third-party services are unaffiliated with and not endorsed by Anki. Use of these services may require payment.
## URL scheme related
preferences-url-schemes = URL Schemes
preferences-url-scheme-prompt = Allowed URL Schemes (space-separated):
preferences-url-scheme-warning = Blocked attempt to open `{ $link }`, which may be a security issue.
If you trust the deck author and wish to proceed, you can add `{ $scheme }` to your allowed URL Schemes.
preferences-url-scheme-allow-once = Allow Once
preferences-url-scheme-always-allow = Always Allow
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future. ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
preferences-basic = Basic preferences-basic = Basic
preferences-reviewer = Reviewer preferences-reviewer = Reviewer
preferences-media = Media preferences-media = Media
preferences-not-logged-in = Not currently logged in to AnkiWeb.
View file
@ -142,7 +142,7 @@ scheduling-reviews = Reviews
scheduling-seconds = seconds scheduling-seconds = seconds
scheduling-set-all-decks-below-to = Set all decks below { $val } to this option group? scheduling-set-all-decks-below-to = Set all decks below { $val } to this option group?
scheduling-set-for-all-subdecks = Set for all subdecks scheduling-set-for-all-subdecks = Set for all subdecks
scheduling-show-answer-timer = Show on-screen timer scheduling-show-answer-timer = Show answer timer
scheduling-show-new-cards-after-reviews = Show new cards after reviews scheduling-show-new-cards-after-reviews = Show new cards after reviews
scheduling-show-new-cards-before-reviews = Show new cards before reviews scheduling-show-new-cards-before-reviews = Show new cards before reviews
scheduling-show-new-cards-in-order-added = Show new cards in order added scheduling-show-new-cards-in-order-added = Show new cards in order added
@ -172,11 +172,6 @@ scheduling-set-due-date-done =
[one] Set due date of { $cards } card. [one] Set due date of { $cards } card.
*[other] Set due date of { $cards } cards. *[other] Set due date of { $cards } cards.
} }
scheduling-graded-cards-done =
{ $cards ->
[one] Graded { $cards } card.
*[other] Graded { $cards } cards.
}
scheduling-forgot-cards = scheduling-forgot-cards =
{ $cards -> { $cards ->
[one] Reset { $cards } card. [one] Reset { $cards } card.
View file
@ -43,6 +43,17 @@ statistics-in-time-span-years =
[one] in { $amount } year [one] in { $amount } year
*[other] in { $amount } years *[other] in { $amount } years
} }
statistics-cards =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
}
# a count of how many cards have been answered, eg "Total: 34 reviews"
statistics-reviews =
{ $reviews ->
[one] { $reviews } review
*[other] { $reviews } reviews
}
# Shown at the bottom of the deck list, and in the statistics screen. # Shown at the bottom of the deck list, and in the statistics screen.
# eg "Studied 3 cards in 13 seconds today (4.33s/card)." # eg "Studied 3 cards in 13 seconds today (4.33s/card)."
# The { statistics-in-time-span-seconds } part should be pasted in from the English # The { statistics-in-time-span-seconds } part should be pasted in from the English
@ -58,29 +69,6 @@ statistics-studied-today =
*[years] { statistics-in-time-span-years } *[years] { statistics-in-time-span-years }
} today } today
({ $secs-per-card }s/card) ({ $secs-per-card }s/card)
##
statistics-cards =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
}
statistics-notes =
{ $notes ->
[one] { $notes } note
*[other] { $notes } notes
}
# a count of how many cards have been answered, eg "Total: 34 reviews"
statistics-reviews =
{ $reviews ->
[one] { $reviews } review
*[other] { $reviews } reviews
}
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} cards memorized
statistics-today-title = Today statistics-today-title = Today
statistics-today-again-count = Again count: statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount } statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@ -98,47 +86,6 @@ statistics-counts-learning-cards = Learning
statistics-counts-relearning-cards = Relearning statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards
## Retention represents your actual retention from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.
## (Excluding -title and -subtitle)
## It is important to keep these translations short so that they do not make
## the table too large to display on a single stats card.
##
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.
statistics-true-retention-title = Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail
# This will usually be the same as statistics-counts-total-cards
statistics-true-retention-total = Total
statistics-true-retention-count = Count
statistics-true-retention-retention = Retention
# This will usually be the same as statistics-counts-young-cards
statistics-true-retention-young = Young
# This will usually be the same as statistics-counts-mature-cards
statistics-true-retention-mature = Mature
statistics-true-retention-all = All
statistics-true-retention-today = Today
statistics-true-retention-yesterday = Yesterday
statistics-true-retention-week = Last week
statistics-true-retention-month = Last month
statistics-true-retention-year = Last year
statistics-true-retention-all-time = All time
# If there are no reviews within a specific time period, the retention
# percentage cannot be calculated and is displayed as "N/A."
statistics-true-retention-not-applicable = N/A
##
statistics-range-all-time = all statistics-range-all-time = all
statistics-range-1-year-history = last 12 months statistics-range-1-year-history = last 12 months
statistics-range-all-history = all history statistics-range-all-history = all history
@ -149,7 +96,7 @@ statistics-card-ease-title = Card Ease
statistics-card-difficulty-title = Card Difficulty statistics-card-difficulty-title = Card Difficulty
statistics-card-stability-title = Card Stability statistics-card-stability-title = Card Stability
statistics-card-stability-subtitle = The delay at which retrievability falls to 90%. statistics-card-stability-subtitle = The delay at which retrievability falls to 90%.
statistics-median-stability = Median stability statistics-average-stability = Average stability
statistics-card-retrievability-title = Card Retrievability statistics-card-retrievability-title = Card Retrievability
statistics-card-ease-subtitle = The lower the ease, the more frequently a card will appear. statistics-card-ease-subtitle = The lower the ease, the more frequently a card will appear.
statistics-card-difficulty-subtitle2 = The higher the difficulty, the slower stability will increase. statistics-card-difficulty-subtitle2 = The higher the difficulty, the slower stability will increase.
@ -205,7 +152,7 @@ statistics-cards-due =
} }
statistics-backlog-checkbox = Backlog statistics-backlog-checkbox = Backlog
statistics-intervals-title = Review Intervals statistics-intervals-title = Review Intervals
statistics-intervals-subtitle = Delays until review cards are shown again. statistics-intervals-subtitle = Delays until reviews are shown again.
statistics-intervals-day-range = statistics-intervals-day-range =
{ $cards -> { $cards ->
[one] { $cards } card with a { $daysStart }~{ $daysEnd } day interval [one] { $cards } card with a { $daysStart }~{ $daysEnd } day interval
@ -229,7 +176,6 @@ statistics-stability-day-single =
# hour range, eg "From 14:00-15:00" # hour range, eg "From 14:00-15:00"
statistics-hours-range = From { $hourStart }:00~{ $hourEnd }:00 statistics-hours-range = From { $hourStart }:00~{ $hourEnd }:00
statistics-hours-correct = { $correct }/{ $total } correct ({ $percent }%) statistics-hours-correct = { $correct }/{ $total } correct ({ $percent }%)
statistics-hours-correct-info = → (not 'Again')
# the emoji depicts the graph displaying this number # the emoji depicts the graph displaying this number
statistics-hours-reviews = 📊 { $reviews } reviews statistics-hours-reviews = 📊 { $reviews } reviews
# the emoji depicts the graph displaying this number # the emoji depicts the graph displaying this number
@ -256,21 +202,12 @@ statistics-elapsed-time-years = { $amount }y
## ##
statistics-average-for-days-studied = Average for days studied statistics-average-for-days-studied = Average for days studied
# This term is used in a variety of contexts to refers to the total amount of
# items (e.g., cards, mature cards, etc) for a given period, rather than the
# total of all existing items.
statistics-total = Total statistics-total = Total
statistics-days-studied = Days studied statistics-days-studied = Days studied
statistics-average-answer-time-label = Average answer time statistics-average-answer-time-label = Average answer time
statistics-average = Average statistics-average = Average
statistics-median-interval = Median interval statistics-average-interval = Average interval
statistics-due-tomorrow = Due tomorrow statistics-due-tomorrow = Due tomorrow
# This string, Daily load, appears in the Future due table and represents a
# forecasted estimate of the number of cards expected to be reviewed daily in
# the future. Unlike the other strings in the table that display actual data
# derived from the current scheduling (e.g., Average, Due tomorrow),
# Daily load is a projection based on the given data.
statistics-daily-load = Daily load
# eg 5 of 15 (33.3%) # eg 5 of 15 (33.3%)
statistics-amount-of-total-with-percentage = { $amount } of { $total } ({ $percent }%) statistics-amount-of-total-with-percentage = { $amount } of { $total } ({ $percent }%)
statistics-average-over-period = Average over period statistics-average-over-period = Average over period
@ -289,19 +226,10 @@ statistics-cards-per-day =
[one] { $count } card/day [one] { $count } card/day
*[other] { $count } cards/day *[other] { $count } cards/day
} }
statistics-median-ease = Median ease statistics-average-ease = Average ease
statistics-median-difficulty = Median difficulty statistics-average-difficulty = Average difficulty
statistics-average-retrievability = Average retrievability statistics-average-retrievability = Average retrievability
statistics-estimated-total-knowledge = Estimated total knowledge
statistics-save-pdf = Save PDF statistics-save-pdf = Save PDF
statistics-saved = Saved. statistics-saved = Saved.
statistics-stats = stats statistics-stats = stats
statistics-title = Statistics statistics-title = Statistics
## These strings are no longer used - you do not need to translate them if they
## are not already translated.
statistics-average-stability = Average stability
statistics-average-interval = Average interval
statistics-average-ease = Average ease
statistics-average-difficulty = Average difficulty
View file
@ -46,20 +46,6 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury? studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet. studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
} studied in
{ $minutes ->
[one] { $minutes } minute.
*[other] { $minutes } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
## OBSOLETE; you do not need to translate this
studying-card-studied-in = studying-card-studied-in =
{ $count -> { $count ->
[one] { $count } card studied in [one] { $count } card studied in
@ -70,3 +56,5 @@ studying-minute =
[one] { $count } minute. [one] { $count } minute.
*[other] { $count } minutes. *[other] { $count } minutes.
} }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
View file
@ -22,7 +22,7 @@ sync-media-log-title = Media Sync Log
sync-conflict = Only one copy of Anki can sync to your account at once. Please wait a few minutes, then try again. sync-conflict = Only one copy of Anki can sync to your account at once. Please wait a few minutes, then try again.
sync-server-error = AnkiWeb encountered a problem. Please try again in a few minutes. sync-server-error = AnkiWeb encountered a problem. Please try again in a few minutes.
sync-client-too-old = Your Anki version is too old. Please update to the latest version to continue syncing. sync-client-too-old = Your Anki version is too old. Please update to the latest version to continue syncing.
sync-wrong-pass = Email or password was incorrect; please try again. sync-wrong-pass = AnkiWeb ID or password was incorrect; please try again.
sync-resync-required = Please sync again. If this message keeps appearing, please post on the support site. sync-resync-required = Please sync again. If this message keeps appearing, please post on the support site.
sync-must-wait-for-end = Anki is currently syncing. Please wait for the sync to complete, then try again. sync-must-wait-for-end = Anki is currently syncing. Please wait for the sync to complete, then try again.
sync-confirm-empty-download = Local collection has no cards. Download from AnkiWeb? sync-confirm-empty-download = Local collection has no cards. Download from AnkiWeb?
@ -43,23 +43,17 @@ sync-conflict-explanation2 =
Once the conflict is resolved, syncing will work as usual. Once the conflict is resolved, syncing will work as usual.
sync-ankiweb-id-label = Email: sync-ankiweb-id-label = AnkiWeb ID:
sync-password-label = Password: sync-password-label = Password:
sync-account-required = sync-account-required =
<h1>Account Required</h1> <h1>Account Required</h1>
A free account is required to keep your collection synchronized. Please <a href="{ $link }">sign up</a> for an account, then enter your details below. A free account is required to keep your collection synchronized. Please <a href="{ $link }">sign up</a> for an account, then enter your details below.
sync-sanity-check-failed = Please use the Check Database function, then sync again. If problems persist, please force a one-way sync in the preferences screen. sync-sanity-check-failed = Please use the Check Database function, then sync again. If problems persist, please force a one-way sync in the preferences screen.
sync-clock-off = Unable to sync - your clock is not set to the correct time. sync-clock-off = Unable to sync - your clock is not set to the correct time.
# “details” expands to a string such as “300.14 MB > 300.00 MB”
sync-upload-too-large = sync-upload-too-large =
Your collection file is too large to send to AnkiWeb. You can reduce its size by removing any unwanted decks (optionally exporting them first), and then using Check Database to shrink the file size down. Your collection file is too large to send to AnkiWeb. You can reduce its size by removing any unwanted decks (optionally exporting them first), and then using Check Database to shrink the file size down. ({ $details })
{ $details } (uncompressed)
sync-sign-in = Sign in
sync-ankihub-dialog-heading = AnkiHub Login
sync-ankihub-username-label = Username or Email:
sync-ankihub-login-failed = Unable to log in to AnkiHub with the provided credentials.
sync-ankihub-addon-installation = AnkiHub Add-on Installation
## Buttons ## Buttons
Some files were not shown because too many files have changed in this diff.